[Unrecoverable binary content removed: tar archive of Zuul CI job output containing var/home/core/zuul-output/logs/kubelet.log.gz, a gzip-compressed kubelet log. The compressed payload cannot be reconstructed as text; only the archive paths above are recoverable.]
ߧl?O3_Ucw&[ ]޳%Smͪ@3g _ؽM{ۛ>7Vv(yrv Ύi+fW-Mk]5އ^Mfϩ~==hN`Y4gANyV1˃aM)9 k/FvV wQdV1ȅ͛ނ5E2LHR^qB<68%A=w |O/V:qM13%eBLBeMi.9`?S5X{9eV(K!/YzĠ:džvq֣n+e]Pլ`A5$6y4EQ3O x A@؏YNdRfGV?9էY68+US1_H)zM3`)[*?h3JC;?WMMv4\ Ǯ\O]`>y~qB5/|!D%^'9{x]q9n ؛3$ ӗ'(ՠThg8QMgR3ZPMiD)h4n]e=tp2ZuHWj% t0HR {1_[qq5 p^Q\>SƓOrc{ 5f%K5f^J%‹rzy1L{vvK<$W7' &7#|\\{`7b;+O߿}#S9LuHĔN:S&&s|tnjZw\(4ˑN m)$uFƆT2bM3!FFiGv.-w5RpJ(=2&|+"KҞx!Pmh2Z r5_gѼF9m]e.жUFD*]FRh- XGp5k ]e`NW.|*զ-tњT{Zw9m]e5txf+~]`ǡ!Fӗͽ׮_yh?F ]hÌA؃CR.w` ״ԢtQ5.+NTL p)9LkOnFE4h M#\53ڧ>x(WNu4zhS|"BU+x[*U*3ʕb]3&#+֞=;ek тn`^#]IDWЕdPhE*+HWPEt c =`FԾǏR^!]i~Gn>]!`"c0-t*t(vt 8+Et8pd5UFkh QR":ztL[O*`4P\Ix&8{;O<>́'+_F+cOc$ )]D\ $PS JRG\v.-xS$GG ,p+%{B\F8"h x"P Ċhb =~Ltj̲̅f/0#u_^PAo߽8eJ߽Nm.=yrecWTy(|%.Q=FmւԞg ;qtX֩&J Ɩrlte0, 0C'z9)rh ٜ9aY1ͩ5#@x"(tyg ,ROsS `D"c(H\ཷf!y.!R#e $'ㅳ`qQ"FS\І&DY{jc%6(eS T[r˫X?_~U)j8t8*k=E­vy.VJOŝ6.f甇ڢʏՅwWӳǫ}09snFi88=-7}9Rƣ_2Nq-;wGo!j2fmMB>\jjkY,PhC+~̽WhxmtC$胵2E#պ@faZ'9j\HX~586hYF_( U8'~6.~pqHG'}ɻN(:>;\q=0Z(1}p٣j]WXߢjbߥ^Qg{_|UUrɭO/W_ߍ7ag poWMYt= EX}A>.7KQf$;{z䑬y#`Y)VY~3ŸzBVw񷋋oN{O wм"wwn~\= kb;1bZFWʾ:\^KV} :J7qhr0k4d]Њ:'-{&mmf;(=)Ov^+58l.RCpE1&(mJEg-( Z %NfOh6'fiS2H2U?A%`VΒ"XiMNQBF ϛ g1q>W6>/?>.iBWqP؝P*~wiJ۽Kh@R ! B嘋˖mchr2&;ͷ`L#NMv>L`es_}yWqq;<W]]?ۧϙ,}?I.mA pVYpQQf![± EiqB)8P0'`ACBmU"dL4y($@L A0&Z%4M6ٛ )ٰWk3x_Ezۻ87v gsz( 3/l BJ1'^a_],Mгz&ysP'H$CH䉭@FQu>sDI?1DBeB&S:ŋ:=ߛqFLA6o٠^h0`<'of,Tq$F}jf-C%CU; A6K-@.BS0`?##j6C]V4Q0[}2"ٲ ?jYvto@ɿſɻ볛}}O}i{7{w?Ǫ{}?v{ϳzm~Xӟhu4g_|S^=JOg3oC^C]^]na:znUmVט1 "8huЦ<x~L񷼘#abؖ l޹bGIWj~q u Сᑇ/ϻp~ vOr;:6i?ze&軨Znhm]|iwXSu-8ԕ=ݾ|N۝ Nnh ^I@ґ0g HDT,kݴ%Yz{!439[NJYGáJV @k贤eR@&$'kzuUafs9Y- 'ِF| 1P_N/:![6=H]x[:B)p:+tF&aЅSp>N0OQg A#>){ϢcI?Mf4휤 P2^Ed_T^R>IĈ0,C6(1{WdP̬ȇd@GJtiLq g$a|מ]kkT}x]'nsA?7tʽY,嬴/dFt(n.r>eyp΢;/L|YIɉ|p19wv"&| ٍeا{qajdFBL@K ۦzBΔE眂:H!xieT)y(R$ raYj+d*ooK-.ӷD0'"6E#LѡFK벨ZJEZ;AlmUlx% ұet߭L*ʘM`#Fd1l&66fm|nδs8y=v,%ɡ%~$%۾DP2D}ە(>|w˝}O}b57.~yHܦ;$!Z#9&6hJQj|:* ^ɥ!Qo!J-"hzrVRL=㌍QS-AA526ndlUaaq ;x[`> ".]2 ˋ}*EjsEXl~]\>]?>3#%@ Jh($CqQx4Wth0i4*dZ`emOhըP%SJ At&8@@_xWb."ލh5݈fiMb͎CQ[6Fm9`f($4e#>$!8 uI8Y1BCh:J 4X2IFđ1?M"@ g7~Uq,l~1" 'D686sbY۹T$W} ) ,tek@l=J!7FKHRq&$;Ccd% hi Ѵ'n5=?UŪØ|fɡ㢞pqŭg L,X"Ahtu`,>AGQrJl%LhcX8Mc<<` lGeM~ |8?yF<7E?PA䑣?^ɑ٢&)o]0Auu1S7z1TcOAci昵Fp2T0l&;J*z%WhWDlʡA%g: g^vX~^YzXg<ȭC[QG]Q%B< Kx۫ǮݲuÒUI6(ݮw~-2z^k}>g-.a;=^!s|GljZ#ץ^{|ŽzU߯xݯ}k:{=oJm/ͽ\keQ_?$ tP_s3$g Ǹ_s?d&^ k$)*fL>iJۣ_knT0p+gf~7ԫ#dgzRsWb\, Gݻx]2:cEE3³;HD;эΝ#r'8KVE- /ƩЀpuV']\Es2r:M0BwzLNE8v[KsaI Pni@}1)R MVS<%f}V秮O%Ey!*)nn=j2J6R*ItAڤ38 ZSZ ^ :9g4FbrCTtrE*YDQ QyXp͞|!ro?ng޽SG7g6{מ~v339 z`T "dN+Ek-=L]FK6ڞ0wWT$)kC5-/!P%PFD:pv[< q.k־m{kf]1)Rh@:z>ntrCR*[(|gɦ_:T/o5lV]rniGFT,U*fART $'7@q!JF" K1i[2) Ok.x%(:*dk"j8{?YԂWx#". 
׶HYRM{A>~-1]+* BYZd[ heVrHXzm"WmQYV7Y*%ߕ5v t0hmKYYLC/K"Nm6=&_RRFd)Q%5%м@I9mUN:*L&mˮlf(Э tC] ՌE+/YRѪ X#r Q*'*6IN(~bt8ؘl,w.< R p"ƣ33\qNB쫖#[c&sM7v;ٻ6$W?b# SOc<,OF#OIcܽ%R2T*JՀ4ȊdEdCh /}8,UYK}&ݱTSH鼑YYS)$}4@(GSih)BL%[a ;^ɓcA*kEu!=16X!BASTf1ʌ`,hdwF@[7#g/CGhؾ`(>H) ef19?O'9`m@ MY Qlur L}wXɫZ);% h}JxQCo28r%IScE;3&d%kPX+Iy' ĺ6gEDo Xs'h)_^ /z_$1HP ŕ ы,_l ={L+BbZ͠V<S&?e۸J/uV[B@Sl!s-mFu 1c]i8.X7iM}uocY- A%-sVR[ЌbMѡvBBѭ=7aڮ3vUf,·1M EptV2` #)jΆuI\t.#8m(*I<ډ-Րf26xSAЫjX?˷t-y=HUB”lB&` jڔv& kSi}tYZ45^gґwCck`PәG+8 8cJ҇Tjŕʵɣy;1ĀU%؉QW\CQWl>^\zZc> u;8xq%WCQWZ}Trg'uvԕs!J*`J-ɱ:PMoR]1.˾pYv3;-bOZx]ˮEX8Z|wwk5\oL(IN:,tl/Cg M =QWF????.%qB?<^JN+Y67^XT_zɩ:f*J];T?BJIbZ%(5F% A-J* /T;UqV^KyZ/oU|^G /.vS}l_|9ϻxlJ}TBw PH;EJhPPLQ#ڳSON#cMy^s*)p&.'TCВ 1 #YYXs6WQZ:TU D:ԦL (?U #CGZc*qsfln]ko4V*ޝKQ7:H@e/M5c1L.)h1?x&=x?y1% YtW/B,ii,6i9nDӍ~zHE׉:AyYTq*$ʆ r"$s۶uԉJƠrE]0.JJS@e1#ZW%Ƒ]5_72u2+eeϸO FGE#򨨑Gvn<5M3B!Ӻ3e#J*BN+HDnY ClC$Oy}C{NJâVEI:H F#mUQH䶛l$x %V'\d2d*VNI7ڳz㛟ArkCAaCF9oXZ!\L$; d$ֵ9+%zzh0@KZ/"@!EʅR($/1^ fbTl crXr)Woj22I/?\xV#'q/2O:4ul7#mt듴=;"´]gK}1k"c4j$[0cf26xSAЫjX?ƺ"o閷܍ oJt0%5d0%PS03LgJ4ҩT~:SDҸž-8Eѕ:#sW VTtykѓrZAC+k_Y$#ˡDؕСH77͘K6T)WD!9x(*d_@RXI$}p'ڌ͞C4s.nBQgϳ8,UvãD+WO&Oo)}f`qΜۡ#KPk34[]?%mӥBIAIÙ ;Z:wj%;|c~5rl@/g7$+jkk'*P”YKE%$hE*i 6(Td|F]>MyVcq.j3&f??rcC[7?(zH1=N#aFŵg)ZC/ z ۤ{rHvz5h2@fݻޤ.kV?:OPPOA=zzv/0~RRcdN*] ;0JI%TTOOG4R DFe bB"ywBGImB#(2~b2ޒ &O"A84Z& sкH+r6Gۆlqz_Scʛ681cT d6bH0X%* m 5b5oUIm-AJkXv# JlR,0&KPsZeodI OV}Q'ifZ0.z~W`+ Y'K ) 6kot+:PWA:-LUV|`I+9Ix_HN`} QEr3FWN26&K<"f#&+ /$cj!yY"N26";q%W-kogܦ!UM ?6Mݗ?sѨ2 j_9%}g@H˯24Niq/HhsQjq S$J蔊-#̶vѨxmbQ&6 IQ$`zbF{`h̩Ld@3r6+։狑yCyԺ̺sV[晭*ڏ͟w6ۅ~(녷wRz[Km){Ic|/K5v^@dA$Mw^I*r52c ͤ Wj={/ |;7/ߘ:hoM:GsOŭw;~ ]kE.N[:C̲C%RGVc^gxӱws?<-Nؘn:kBꌦ$X+H]mRH= MS:ATuk؁NfVDG\u1Pȝ%uXbѡ3 upQtz*ݘib0}sZ[' `4TФ}) 4\..>Z1-^ln]mo#7+{_ Ü',Lb j,KZﯿb$/-25n=ؖDE6aY\O֋RvPԏnԫ`>Nr+WÚsyǽUZɢ-4ߥҔKn(HbGI79b8Vi(aF7TaF@i.w  TzE9<^%Z`g&ܗLGE т#'jߟMf9[o; u;-)SOz+T]ePbs5#r9m=Zs-TvԈ; kD{$" _?E=l~{iyt&3Ac@@1Zڀ6 tp(FbRyu<ȓyn({8oos@oŦ'xJšy4IEAD Oj?>Q*bQ洘8 n>P(d:M@}'_$Tuq1_7{XA)%o0t@p'0kSA4I$f){LhCR.:!$hѹ7.¦uS Q>5NqMzVr4\!bÈfzٌڪW|.kSTa8 _R )v{55ag.mP6.mY6?__꘮m5uE ,Uĵ^TBj΋_R'RFx*ekr*7ϰʛ|mUm >e>͍uI{ӱ!vS[ `C/86#<,qm5~)Sތ˔a`*0.,•[_7HpkbjK 0 ?/TiuQ"0".s ТbR)dnU4QU]ErUGvCCKx`]ųNm59 c w'dfoswyṻGm ;&Q ]J Y- QHʨlLW!LP>p]ypJ*7|(ȏo7_ 63Vu9)V9,i%U8)% ݙs)s1”ϭ[ۦ`^ vP/%Δjspj)DJ!Y2Y.竱ib|RN m155%բNOvo.0$xJ P9[Sp|\_>}Ϗ??a>٧?/0J""KQ?M\MC{˦b;4p^/|vY.X}"vkgvVKn@?|}?UM O<ɫU9 /R4)aUET?<Ty:V8n/p_ן3֭w G:uf߸ T!G'!`g+ w4&Uy*ܙ`=@#}͆%䕵sf""$1;l4A:FQ  AL'˙Nd{"'SKNv:戝N;OU}}~cWa^~V0#*)dFRtiD);}?9V:^Y{㭬Ǜ]O 1͎y}-WC2u}3ͯf7:Ws*5.*JZ]ɤtHy2N[Yoy|?l, 1R"E 0ZYIxԼNmQ9*Kq4K!C0!B*Caj3j̀IDk1h?!-7鲉F??1.62cYb~8H( XqE@ARj&XQI _w7=I &4 j8qCvYPI(waaeVllW&;94R>W3hzەld,Y#Ym}rۤ]|3<Ü>I|rkߺ:W-~,TYQyzt{f 3[!}PL 8d+9zBYQ(" ,G @Op)KQ[W12#CRFj]Yr#c6q#c>[%ic''8ɁrXLdYl>Yo1n+7U(h42~lVzKQImiץ4:Ŕђ\ cPyoRE#aHΞ TH'K!#a:0- 92#v6q#6;Emqڽ1?vDT0P q[ٔ (GVKNA͊ iUz`b^A$ D9cH^>r8Ak~=cW1IfD$="VrPsni a%0HUX#F ѕznt}z0BBo KFb-$AJsI&9b]llG/\U]6KEi=.n)H!h#*%C1!d@:r@Zdz\<.v͎}eC¶26DSV1JFQ=G?RɌrӯW`1p`-RJ"`+-TAs!>m9Υk渗,J 죏*5H$ @"*@6tATD;̱C 1zmM׺d.-U/Vݫg Q*Gԗ˩Iv+Uds'%z*]67W9T*{h>bx:nG5^{mJndC}O7g.s~{깨|Kބ&Fez!!jj.,kv2e[ͦWpyz|s >Xep:O*Rff%eKnӢ$JђdKk_G~ZR_Aֽr$aCk@"!c 4JU ^j#TQ֪W!)N=䦜Pn&x92JpEw9lVJHH!)}B]igwk6v`fNw7؀ѝܿfmͰ\kE7}^H(}D 0+fޙ>ߵ=Z^֛ r:2PJ1Wqɦ h1G =d=gʌ|]m';hƆgrZ₫p2N^ Ý3 q|Eu^w*rz3Sa3>Xoq6.&[ek7 A|횹xQ^Tt5Jv~|6tM< : 0ȇ?Fɕ+9X,Qd eJ+g?{Fn_djD4x*N6UIjM˅GCI"WL=DɳUk[3t7d޵-}z~κwK)"%CiJ+X#eT :ё/.t [ :7]DN>3~EsK N[WD{R?|^ػ寭M:kb>9pXQXI}sOqR.gC&ᛨo5]FK^Uۈ_u*AtAa1x }LLӥƢC/h彳U7=nc+hw.I~Ƽ?UPl?۶ ߋz$9'ڲ9%cO+qVR:r~gƾ-ve_qf%-nKdY1==1ꏿG0YLgj)s=E06 -ϛUxA@NF4ЀqAV;Z?fM#]4:֍֍f4YtAzd2Dgg-8|p)',)6K\тH]mM!`>AL:ğ3 Ft,cWgъb^3 QbN_ 
(N3*)y_UQzRo $l 1&9cd(]ICh[1A.wJ+gUIm-"Rd^O n\pQ)M&#hJ"w]ʷ3p,Iv[)( Fg~Q1difZ0?XKbCm!HgKRXŷ04&nuZߠ[ VF'"t#) '-!/)3 184vH4̅>t 0&B4< s51#YӲB0-bOcDcf*WMku7л I[}˞_fx'``o^L"k^J"XV'U(D0̀R:^vrS/Qi14H4HRV+%__~[Êm<QȪu9xkTV DLH:DVy!nm6T;"l6hJd"֭]Ki%g1jUngܯVz@(| Cޢn)ߠ{o}yDR*W"&E**άh 5؁9 !*a!JA1Pn^t66 duv Lek]C> Z)&;% @!Imc JP1^Tk3i YPzZ/IIP∬3&3p%]+ %Nw!*>\|d.$`:kp)5$xWy-` $,= tI^ |_dcʢs, g#!QabI* Ycr߷gGCxI`=`)gׁV($-Sb3֕nǜCiEN<=n[?4@h[_皮%^8ENCjQX99N(c, 1kM;0l3Zf A:C6 zb JXTMgcl]DEiK{<LJ@aȡQwe׋55z ΫIBg\j<=/sF,4Bڠ^tze;jA=nԲF 3=X/tߡ X/sNdWQ{r2IDPGC%$ˢf<$(ݖj*%uA* \@lBi Hod@I)I…PN8{bHt6ry)R7q6wKsb3n%qz:3V m~Чit]=Jza%<+F$"NI UMP()]#T\>1|?A씖ŋH2ki[9 13A> %JZG߭K,eRh,E!=lSId"YQ0!ɎCg(x@qH9Kdȼq}XfVOStxa<$Չ<s)1LM% Sr .acҥIxlEC٩Z^5EU:QYb4JIyIJg_B1K>A@J̼U K0\Vk2o%TN|~eRTbs[I/R֝-kA T 2hFOmz:gz~js #c!'q}0sOCgUYC-`q~ ?/c֯&2jLV'ޯ'!sm3xyvpM m2)mv)wcXۆUm yW2&M q[xSީtǵveu߻'#rjLs1݈fb#V/U{>vAliMjw끏 Rj&GݏWw=c+4qUU= ;ڨS|5tkR+]X zKC-!/vYQVgz %҂l 뢵,b-y*r^\E~F #__NGn\9bD6"(#X0Pul@>$KA L=U L9bkkoҡ\x (p E&#D3)X;Kf x4_.˳IcE#!'ƙF a6xU0s}$3t:F)AtY$2bO,4AV6*c DaKer!9+թK>s" :)%KVnjuI80lTX }ܗ|T{x<8nD(zNk V.S ON(,^eRUm!WI^R\^Z%J&?v8 |hh5.PlŅ,~|+tFj]@BeIkLȒ ATЂ&GU &&B )dá'&Ijo^z( ZƮKbwΞ!wqmX.fbIEY3)˷u-1YnaTL*TI`#kw9y-\M:qYXLf_+er֐V:i1b2U\| )+X42%BUIƴg~9HOT|'|d,R(P$bs5sY,?۟j+' M,'X[Q B@>C0:HmOHC)aXW-^(nkdv%ak^kkLJz'"rZjzGj!Vm~ /βQ`o :5eVS{f7a]{<ڎs+@e0~(7 v)D<NJ@5 GSqtlRQ<]Ϗ7wSY@ƪ|Zv)&a n3}n6JNJA4^,c|Ly }^Zt9%YX۟6?[r7oX׏\5ӎ^xFz{>hLAapmOуjZ&_Vo~^bq~v[0 69lt|[Ƒx2bYQ-=Po] +-#[1[FLƓ%/ʝ y2>|5GŎ J{&VmݫB\:V|2ϓYS_i0)6p߷'cR8jxzFF?0;O?ϟ?G~? JQA+AP n-mWKz#+>%wu0wqn7}~觤EҤYhHM;rf8S]U]z--hɷ6%_繟l^  |?GH֏vǣ[O[ &'.GEyoRkT1ԅjI 염1[~xIhŒ6~mlяWk(bJ$ L{;imWhi4V"p`>Io++Fz$gB5v#O{ !@Cd#xI5 jh49b[N{pNf2vB'o~]٫Aԫby$ )F++ilH2J*plzҷ [l~-@)!|\L|Ko2I<=~f?xyT]j ]ue'[OݝSnэ3vbԡt>4a_v/ 6g9;oA])_ne^ f78Z1T%x)]F)ʯIFyyc_t0uzo\/jLRH]ҍ73(Jhd^}+R;%m ~5*J2%IXĨByX"pN'O<%Nm-V %++.r)_.]1c+khXb1 ΅h$V3l1_lq :KV-"s t6Syp+U+_6Zܱb[c֍ٸ"5gK|36δ @яm|e듮pKIG;&P\?ǫ~wiM%/cs?i܏\P8?f?j́wǰT#(-epLG]Y!g ^3':xtb@OrLj+ sIһnCO7ay?saan`$vG~̏ÛZLh)Wwa-i7jP HeߐӗtIj9nn'{Lav=πJϮnx_nׄ[02+ DTC[-q~UCiTwl'Z;#hmѐBGUT^dȼLDt`J#'ܘ$*mJ&v\zo3THY"!H@mrpkS*(FΖWnک>~l0{jvumfY9 7nѵ6Q̾2Ux['==ҍֈ|y 7wԳH)n?jn 4ܮ^h~Moخη빸ʕt<~~{ ~ Gx]^-D~ƫ&ܪGpMW\%k.^q9cmX-*&lWZ{SOvȮUV}Oeb[,"q\Ñ11P@x*)^͠t] S2}_|g˗o|`5}pym [l#/Qt4|"ND[,׹[!D[lB ,0OFu#TJhI4V+@ 8w+zT1GR*l6_Gkxc@ bA/+rp)HpU+ +bQ~IwQ0, % %j,ie( 0. 95M[+R׶܎"^1>ٵM96wL~볂E意 2D0}P J!zfi#xWr $]ַ4sDKS'B2tJ"g0rK6*Y3UU-;Uo ]-u@ldt'{} ;/#kla4XhTkׂ@ Vh=$r͓1l@cex% m& xХw#JYc(@cŸcWM kmZv`xV[,d85V N 0BѬ#80I;-iay5!"dipK.$GP9a Y1F,jDVX#F|ѮQ(aid(S(@jqշD(E5H'FSB7S ( Xz&IT(4-P{}#g"ڡ^-YKvՋ^U/V?H<;BRsZ$%Db+b J HCcчŸcW}( C>< +, jE?Jɵ)FvBrau6@#zNdUHƣUH u#/̠0Qa8Pǔm! Q`$d XtZC(y(yPcN"90;3%"@Ʌ24 ΞO1[aٺ/QM'212|+qEM1^;>T? 
"\l654Z21ચ.X vuTkJ2(eKKQPnYR.hxT5Hb3$.08+a~2J ` d1q]6 eF1+XhS'ڴMY-3mf2rNΉۗmA+7BwK؛Ai~8]?bbMڳ 4$ v5".MoC+zoVھO, N./Zlzck_o>FQhz>{3fl~pm\&%;)o?,|J'mTO-|F`iV4*y2P}괔ro/y9i}0KǸ̒7-Gj,k=n,Ģ'I8n㳼RN^5_W)r)pB#OV5 B\K|ՇqzJ_zY'}/K}Y\\omC 2,*݁7??|jp+7xd78l>NŠf3['_G 'E3JrZ &EE cA(C=ϛyM|}EiOGOd9j>۹^5>vo"No.p_=3~=E=J]6-^c]`H 'Ԉedh?!n戌 Ѷjr½)z E&wzޞשn伷ybrmVQx͋b2ol];{>DTnjgY˿6&ŏ8BuEۿܣm"Ve{T?#@ LdjA;TRWh U{66L?uqk6Qm11dO1H@&*ld$2:Bq!'mm{ Xi9ϰ2вc֘Y1k:fMǬ5c֝r;FP٬5ct̚Y1k:fMǬ5cVdRk:fŖ5ct̚Y1k:fMǬ5ct/)+ٍbRpga<͇]$- ip ?HEj'qҙ&10QWwz J }XA.J4:z.}MTAkh"tJh!z$7FO᳭Pw6Q5nZ"yy\ɧCcZ-`8Ϋ@?5^&tz95r~߾h~$*=&l/}ˏ͈Ͷb?D_e49g}s|s|UK;r+Vc*=VmWSL=MJRZol}mk+{pUw`Iw H_4Yyw YȲ 9K#DN9QԪ*9HG嫣^99Luo.*޵q$2o9~1>Y`,TK lm('RFS!xs;ΐ(UT:pש98$o=S$eXF 6JyYޱuFΎrV՟_-koaTT|Tqc-3.PԠB5!&DĝVcE] L)RȕBzR>}-(+?[J FL#ZʹIx@ ۣd EGz )ymo'.~;~8.]a9IiH| 12<x  ;B̐t8永>SmFo-x@6/l{B9zZ(t}?w9ZK P%EFRd[ =^ UأX-F)!~Er$J) ^ )//kA3Gsm+>7# (L[6x6K;+ڒ@!lc"thCi GcBM܃ _omD:"ALh=w :#ihP!RaN[ӷ }jgU ޚ,dMпWyñGGPCWo!Hp 5u[C!DN \k{|-d-diBFPR;z?vۥxo}m)w=ݻ܃/ufK[6LQK+@ pQ&xǕJf)e^Jdto^1|;hd" Mng -6ChuBLEvq L iANHwZbx󳂼1<\Kܬ;4=.Z(Of/ +:9͵!\ D uЄ:泽 _${ӍWW=<_,E,8G$hJ`HD`!f~*Hh>kN4O.2_>U?5Ǘ7r$vt iBV2!6 #d(X-%w&'YJXCt2z@Qז?M ^DvPY6'#,r5nhxyυQhYHJҜiRLj|5~Pp{q@K`g*tbyK5qкg 6ѝSi|f,uzi%/fF* Sz^|XL Oe*翋wEMƟf \N֎ ':@y4]l>?pJT0V1H.p"GY6^$!/W(eTQac=lL2Re^qC\$*pk.+XF4'7jvtzi~sz?a"n@>p1t9&!wCq _ULWq@Jj0YHZ #|Flr1y8B i =tH:i68Um yIbn7:'K zF3@nFF}_ݿ*X8M͞Ԃ;R0bv;np813+TVk9PW~bߛ鿙_uaX= oǡ:vm  p O|Wp-D{"٥n鐇!&iI8[ TRj&yh/lIjٞ]3ޅٚ/{SoJN7J?s[A+!l','xy7/PN~-ϖ JU)XOs3E ԁhɕR0\4jL/ W_sH#SW)jk͛7w2kS2gap"YZ ~s =HQaB\]ԗgY~?y:0{L Ekv i4-wv9R'nU>;dmI3]MZQBN, gˌ'~̚t\'F^[GgedEںVCY:^|XHދd7s(&jc@h1xP& '~돋0^!;w>?Nj?\Pf.ﻋ?/xP8k%>E=x4?Ժq܈U77|yEKnPVV0J8?/~8~?zYxTS~E\~>h"4C$:j[sMe#߸MnŔID tnCmA!L3Q"rBk-z酜 KWV):N<8>,r*) 2)"a\s%Zx t:ysRAāATuOlM';_|;`U:a;hIyrS]|RVKXD˺^ m)9ΪҺʜb?Rڹ]{Ȗsܥmdշd4!wu]@Κ?j>0_M(cqps!]-_‰W0.x]> ? gEqx+-}xзm}7?'-&Ӿɭ+ewǒpqj[ۻ&ȫPh48pV}kW:&+!-+QֽMl|4wE$G]L)sJE =zZ[>dgK?{eݧ !d%I8-QQeN\`9'<S@Qĩ5tRulrZ@7NX+|Wz/6g/W_ kGFcio GFgR$-Xdh/o޾]AQ_KK^7 Ο4n{|7cPInPF/r1RvSIebJI, 0'2do|N4_nmoOT +>*.?^eZSpe;9?Pj^vNRi}v-3/VTJvC%W2]:JO!<l=n3ǃ݂ :z Co.OfoRt! zw-wb],|Y9/ڍӁfM:L=~ݚ'+? fm`sy5:O =`$W8Ljt5oVx@8BMwOKTUԺCfRd%@ S ɢ&;xob }G(eUE.<"3tt$2[CƘ"{Uk4 Yrd#3ɞCy"UXLɥUoM)z༷Hˈb̑읕* ]َFNҕ*{,[{/?>/7)(*`GFnA 9*ՉDFpRjH~YT (& 6$Eځ~WTګ*PYP^PQt +}zF%˾VM{J=)u~9Ҿf\RaҼvqV;Έ'/[`!׶mGj]g$bCU>s<&)EH Q!1c z0"0:ɹV>˦+^1fQTO l MPTI\ˤjkj׌JR8cC]>.B Dy˫N.1 xOnh㷢yd 6{^0٠>,U)QgZNָ86kS7F{.M )ڔ!3v,&:Y95s_v5TkZZG8y`.Gf3N/SZ#Ȓ"-h^tD_Ufo/ r +:Ȅkb"kR$a&JI[|*@FuB*jׇQ?)q_4b5T#Q׈8p&1wd&Ar7 UN}͜;CD"Cl651Pf $'ڐˈ r3>z$IL&t!jFNTړ^7S'_g5.T/zQz׋81# 3 +&8^yo rZcb6+ߚYKRBzqzq_a5TBe}yTؒv_ƗS{Fvjٷvյc'am|v >XӒ8⷏Vs0X#4ПƃIVis|arYob; s܈aIJI!NDaI"X#IM +6`pR}c0}*7!FX~\ڎY]3>t=~CJ1 W˨Ƞ!(0u 5Oo[#mpo0q"cEUۛ[(J /?BFvB.$j'}9\8 /0\ c< u%eF]r9uUEꊨ+P"QW\E]jwuU\^] uxs_]V/k}0v3=:ǴPO@KTR}y}{o^|^D/qSq෭e<{?^\пgK>u[LrQ4һ޾'` EC {aBϱ͕glڥGVllݲ)xxfӞaQ Py?[,.=nۜ}UNP*4Cn 0x-)]UB~d6g_%5Jv@ lSȅ)8Z5|-ҲޢyDv0ꪐVְ}WWJۿzp#e$o8m?0t9"hMţ U ۵e ,4#zVK [&IdNdY&A6alwTr $<M;K_Q3`3gAo)\]GhJ hxF_ҕѳ<3\'qG;?*}Ojd0[+z@sg௦cָ}wos< cY9b͹/\$b@cuۧ%xŹ[EfMRν$B5-4 tX.ޥp=JᲕRڌ/nUDΓM2<:|"+#́ISUz /N]y= e>oBH`9G(9PnW5r:&Gdb*(/(_Q= )LF^*-Mk6-y6X5R.uP҂ʡ8K+$J)* I]`s.L8/X.EϽVX{~+$)6*uLm.+)I ~\s r_DzHіqb fd"$*fI >I欬,g(g\ ĬeUJCiˌ 4(3!$KS)IrpX"p\;' MFMN0K#8&Joi3&! 
'N?FV5.^Nz2τ#*IЂubtDi<)TJ}'N@3dw˫}pY'6pu pc:1,3˨'.2ө 9U!YhZBc0 {Zږ mWD.nV5TLeby1e6 AB0mARDYJYegp0,cţx^5ެ!&/ z5.kpg˫-M{˼\MjuHK^"t;<۔IDp2fQe"ѩij4pנD}bn:f,#lQYl) I4t6+Ivtlɔ&B&l*;('4-<0Xfg߂6Q!򜠇pڝkUHT X׀B+*g\>ri5-):: v}ꛣSV}s)v={tDH"lghTQR%m_Ci 4|zapu^Sv8s)cv 9|ZXR VHPثBt 6Lp'mHs ,ZIk Be`i7ꖣK|U4?}H yYt.V4(΍DC:\}LgBLx0mZVw:=mdoޝ<~>_*) {f}I"h\67tavF/uCgҭ9=OruNCl xe[DxN=)-P";j\RP̾W+(T"jaezE<ݷjRIPh:r*6mnEpmاVKKm*ڣTRէ>B 8@FpIe b>p#u&8gx"lCkzbD9mS(@sj4øXINۑ,7L`_6a{`!g8kR[.%q*ѽQe)3w3e&9p De&i*a %,3AެHR;L6 ̵n{`+o$N-œ|zaJQXQo%ugobT"6 = =bO UHO>$SuI{wؤ OgK?wuv״t W8"D ϣq2RC+[Q uy^FcD2Y+^jʈ1h#2&"]8;fMq6 c߳ [KÁ~>|'M> b] /I,7y'?ߕ}3+C'F %I,(84U9b(aFY+aVu 1y^QNF60ϰWBDy Äy}i@5pD[#>}Z }[/V^PxE'n!6?>Ӭ?&Lg[ASXE?mQc쯣&aI*%4} mQbq 6KŖ)X.a>5EoyeJy"9hnͰkx^2V; _i7';Lmg/2Gݭ|9.s;^PWpSjw $rF'KqRJylZ.A9-F¼!5);; út7xjmܮّ;۳U}ؒVwO{,+#u{p~հ﫻ݶH6ۚ-2x#W m?쇑vMkD',Ŭ^PL4uӪ#wXY/&~Z􌯃ͰF߬9w]@H;zGSa`u~el1M>ՙM>I%( q̙տ$h| ٨AZd<]Ϳ7c/;ukp6PE͆ >E8t3/o#b _I*~"*a**S@l0Oۧտ>,xٿn!٩;͢7%{3&l 7 ۶p7K+iuޠ7LKlF^[.  ߕG~Ӱq)_w5΀z|fwz 8فaJ߲13;jmhȘn-L%ڼx` NԼ8z/[^>2w#ff5Y"y?vD0d) _& J8O}Ul箂)aa)5M 1&4XTVJ2oQ@͝B2,PI^svdAY(@t֟IS^O-0\3,҈RNPl CXР i+x^grbbiE;{ZWLr Wq7*7}g(r}*rp變ܲuՍ-erl=,( ,.D8AxmWPBGH<=낷 ET\0Euq&戊c*,mPr(Ĩ('= h<)E;E1є;t4D{S(sVA-uha[0 3xAca8$e|L>z=K;Z| Jm&L%̘$DZɈ2F.:֝LzC8<_Ho]h&T+UuK9XI=-[Ðޯb_[eV4{R0XF,a^!X&HSEA[5u[%x3'16 mZ]k7}2a Z)^ͼex3' 옻8:Q7//`g)鍙[D؊xG8^@l~J Vix}σVB۪σRv]dDJ-ʳT6٥}[gi۽߬]w d>HM>hA$^6Qhn-֛ܖZ5 i=M6r]U"K̪ySh'h}ZՉUb:&^mUGl.^RQVV͗Q /w54G"=.N"=Lj ~E1B6)걈CO`{Ee|j -F`yܗh{wK4~Dh%Z3T'n{R8Vgp2S[5iPiDʹFQ0!@qZ]jfZ6n8h+Ύ.%7== Yh}& m<źdlHgmtDZh tC$^DJ!*D+ZQjD#Hv GhD1LA =)ޫcӻl*#R"%1dNF03,*͸   B' L<{=3B: qVwĺ [8L"wQKXu :8cL>:HDz2ex~B/ۑ;aM;8E6_x ]\5RBqԍLَ`i $˥M,@NVj!L^VkkjޮM]T?O/V``R}+Αj4KW~ PLZwn!;YEw!3OtƏGvgwsn-wJYEvڹVz޶`hi$Wa- _q;z7w/o^~wz޽y ؁y_9A?}<~|Բ0_;5Ul.fw\rǼɧfcv"7Տ^K ݷmf5Rar2Wi}2UAŬM urr. fK>ҭ{J+1ugkOa? T!AG'!g+ w4*Uy*ܙ`J'ӓ -eG^#"*4`""$1;l4A:FQ  pNȝQՆ({ֆlmf9)̷[t7ܠV LvCi|0Y a_pqG=fYcIEiKSԆ}3:hF g41HCP`a`'0PSAm{"$* rȣ6Lwܩ}WVo% 89MC>%K5Z(bʏ_vGg9#y!uG\E>Ih-R܀P>RL %J%iy#32$bcL&U3qXNW)wW 3䅫p_݉>e۲|4z͇ܶͯ_pi8;?3f:Z-ATF% Q8i$"S*FlCKr%V&8BMFg3 d`M@C:{Ҙ0p6ʨoy%x,1}*IfHzKuF0K>+Dr h}c HCWee ɒߌ3alS/IXo|(iP΁%MFbuy gDx`-YS|٨dWH3E^.xL$b49|cmJIPD#[ K"^.B.~;ȵC irbZuRN` uP%?.w9mtfC.ߡ]?UвonMoo..Eϭa:鯹>m7oC[aϺ ץq.k2,I鶙pVw mʻmӬfKrD?+zilsn&|\\y ҧZ` a>T_jʺTKUC. ⻷o7 pY}'oKe$"_jY"Me;^ԄP"Y+"`;VΧX Xyg3 ]NGZ;tV0#*)dFRK6;˜,;ޱz`>yM\kyю ~>D>*SgP<+iRx0%sH[ ,f 2rӛ-ڊv[Ƣc\Rq]Uۿ}eP cTAMf:~kØ-j9gU NM7W~_8w3)vY1Iݟ<8A]ͭI8'[y3 П*}K QDREX\&zKtPsUfoƩ0tf$ۤZxW-T>CMB)_^ׯ #(u9?nًEo' kk]qk.fy Xԭ?"eK(QSU}QZҙ*m=_|ɬX[ۿ_]=dpݱXw ΃abKt #i0JԿ73\2 g 8eCm'"DT['\e+?x1CQۼRϖE$6Cʃj;/g B|v:-6]dR>KIV?HC{=jsǧ(m^U>95B Ž]XzUY0yviϩ !5+qaI|3|ލ=Ic%y@X.P &Bw' s=Ueb³J)-e8L$}l(ilc,X٠H@bjHO+iZhN@3'u'G,M*ޒBXYL^jʈhA #(H8s(m8[Nq%O)XV^[m~5+bϮ>S%m[5Cwc/bѦ4ad.F\)Aϔ8C=e1Z4KǍ<+W[ez :Im tret4LI/NPLʝL$p6sJ 8ϱ W_}06"nۮ8(`_:&`["‡wd\UFgŎ:Ə6zJ/Lܖ2lim%S|caen=GɀE}uN$\^Jkʱd =NI3E4P!)@nдR Q ƑT!XrjOl`DvzOCчIu;i<6OhO.\1+D^M??V n|ڟO܅`ꌣidRsK= 选퍻#G/hO_GϘ*0xB:2AULji!8lgV߳ ]ޑdW?5iafeWxl<7sl ȺB+G1b2*!f1{Cb=!f13BL^4IZŠb"٠ƱF5p h9xkuK?CXARzʊUDIgrA?,e{Nc:KU0 m/6RE1#ez4x5v0 ~qB}ڼ,fz= j{ypUݪ6_O"-韍G)8JzyyMv[.*nޅoo}HOa4 VH) эkOWGcTT}l7xWbf?DCw,f ' 47\q~F*r64{b=@)3PĦ׵]_^Y!-4xY,p % T!EijuӻFRw~=br{KYULa= p]gśťWU@SuByAPbq8f r/(^ΦrӜh Z@{[O f*eM'FtL&`r0?L&{Ζ`co[b myؖ;%mA޶|eM '>AgL|cO<36fhk|]%;PSpvw O39ƖrK#J#/IShw>:G>"G}5c㠃@1H00"Tp4Cd B*CHZ鄒1&Uy@ilK$ic30h u g3L2AYa 91Zl@wDqVc7>)/24E` 9Z=W(IoccLuKP&x=f&qpgS5ok^Q,| '6cN)%@o M0( Jx(oVLZ"$"MT2UvDRQ 6Pd4gن%U|π͚eknO d8 q'DH9ϣ簒X P9h!Pͣ3מrBBN?1#OV$!jo`8R9|PP@`1)A{oA=<ືI? 
w=c TS5{HIE$TBAF w q{WYLeaz=DӇրc]/wե'WaBr6ތQiV#0Y"1hef\&()S+D6bB-o#c2 e#]Oef>U36V҇ȴgJ2).I.x< !hT`XqP쑏=`tclrlM9AofI:sujAjRMH+ID?i`&d`rD$I(T/\YRWr~>9rJcF1KqxO)+bnvSA Te)Rn6 j4c`KߔG :ʀMS0vY4a#A28<0^szc g3/)]]&IVAJ0:_IN1+fS:fO |]Udr:tDZ B*X\*YKP h<<J12 sH[]F‰uXTAIݡUQ!TR,-ʩ܌*UYk'aݛ.[ zO~yvÁYb(`˝7\ Ø8P=x:FY^1 wB.卺(Tr-c/t򕔷H xQ"9K:#*/IQ}&}}G/r48pKэR x{f8>5hZ źu#dKHvIJ{1ts?Kn[ْ[lxzO(C & $ir/-t+*F(E%8[zM2!Q+zQ%֧\:0]oW䰣aMzc#مNASHybwKF5G4R7,UŪbb ,0EB8I3!2d0D0ő%-g/|02 mO=1<6LZv=sTroHdJf,/-(Vdz=P228C([ \oOdTH 9*#xI,|t2(EhQd"K~h޽ XΌY=xva݆iId:+L35> Rhe<ātIK '$M\Dz(h! k%A+σTك +H$yϓRTJ:aE8A _ʢkDZ:9s&mN~BlvW ;f-ٛϼ:Yt\%NW?8Kٕ7e%笋߽[nc(b!0o8_ۧE,8N.5X Ps A+(ܬw#@_t =T93 # @z4"GgJ2$d /M~7(e#W%oXOĠ*OBǭO}Aq0-y m8BT"^#[*D9m[wVv`Pq|+Ǒߪ69Vט8DR]fNǧK3*ruR.]Tsh -CTVoޗ/̦mb9 owv^-3˱] ٴ PE[ e4dLcK\ln mlFr*|B q0,`ż,{;:'4Jݭ;jc_5!d$\FrgX SFVyRT"O7^7:Gvwo|.ͻO@?= 8FwI0&G3۟дjilo4|9zivohhtf{~I˛A7P 9GhY a$ż_zyUTt*P.PMOdGr%vP?Z&#&V4x0jO^S$Z[%iMF%LQѱ>6U,綠7R7%! SqF9N$I֔)&9a&TsјÞN+{:{u5Fﲳm'/;@M^7i;ϓNeh=dq[%Fs+H1:16NU&)!+^gŷ^|jvz Ov=]Bv$b{7/寠Gds57ׂo ̎&RvX1j;UW4"|p/N1In(Hh!$A=*c{/Y')RbP<1C*& 8 ң F&c&l9~9Q'ubgQn 2B9Wom8 ]ݿk)KBԪ hARc2ȿ("Z& mExkg"{Sf&4 ' 6њ`1Aߚg3rDtEwq.=wwPo8}jt'sf̼Ƣ_lu4jUs[@)HJt8э$ڈFhFjF" )R}O2/l|P  ydi`@y@WMO#R L8M79pd4~a3_j?@k@κK]M@ |x ¨dS* >C N\µW6쯠?eӎzUA|s&jH\ )cQ$G2gjFEĕ(E:hV4O6׭|vcRg/AB*rm;!LMēZhGX(!( 5KjDKa]7-dv[ !C mϮ' -Km(,uUd/W4CLygNHOtq`IBYN NJ7VЃ)F(N' '`x`"RzSpO5C:V΀"G)`kp'wK$f]\Fbea pR|G ~U;?|,H&*x/z;r7RΆ+K^wtqЍeeQhG 2'O!'d3 Lm ZKU7l=K@10?FfXmӦZ릒r͌]5!8(B瞕Ǐ3uᕽl^]WToVWSo?k -Uhi۴!\hί2~TTjė5(Uϛj0tSw)'ůXC5TiZjRmUT9m⪵z[v6ò{[ev]#MYio+%ϡ'&'{8(SvKit*huw*=)x1t9Ek|%~縐%qvך#IP%)ˎB'#o"B@'śP= PQ44]p5'Żd6>T 6ݵ''a7 Rx4dgR'ſj&q; RV%cozxZ4}}݋4sd/sLWh9gZ ҬEMv:?5G傾6ӆ<_!,==.|^ɲ#IU,37†~9HWCN?^9͂.ޕ^Cm{*.z$½^\/\i,t^O'(=LR]3 <1Mw ='&twPTG WJ_;Lstq66DOLRVѹaRʰLNxTl'e9yihhm,x0 _f">W"* Hpc P Q,H >)gG" C86 y+{=K^RgK -dg}gQ(%$ae 9c"(id` RHKyRѲ-K JA鵢2t٫fZY=%U3Lhq#ʍ 7oqf\`BKڍ0Ue#vWkG*?p퀶cܵ&*eG: T4}y\v>%G;;DK04  #Z$ɀrLJd7k 2 ^3V=r,5ZX 񠣑 EG"8OJ#(AԲG9,>E"a7u=˕ܸv!Ȑq]W{ߵc#rbv{c/7L!vp{ ~C%t6Z`-SQ-ӳ2G[0!+?}7tNr\2nh%Є,^;7t8^]bdFJ.ʹ~ٵ坺6#3-kF30ܰh^sAnkh~fm͵MMa3).\;@;@ 6%wziaidTqX}(0FCmFvVUO˪̆h3R1*th`@:%#=&Kdv&FjpdPt,}W2z(9@mi>j\:sp1[$;57 5:/%ЃZRF<@G5)Bȼ*c>8M>)3]78{*ȋEK*z@:G̀ 82 ESY_ ~ugD%lLe5Dw <}X^&bZWi~X2'ȡsęۍu{cl,1eXՏۥ>t>옄[XcT$C(0q%G,X2輩GvEC@xRG. c(.]7]]>[Q 4֊ c'vzd /9[˃'`~6z޹Vemvo:V1 nx+Ztp6'X_]Nnoʀ£!Lhi" L ~(#HwLG)UEZB<ЇV2/>߾;p^^/Wg})=:AyYCʇy^*{G~x~Igܣ|?X_Zv|3?ʓ#r`[*S kZ Į2o"''HKHg[R #x{$9qw>rzv'-xfW;-w3 #nq.խtŷGlrȦ.ebfKnKq&'彞(4$Llu9DݓUڶ ;{ܴUejrjCV%R꥓KBf'M(65̜q:&)q-30ZFgYkۓ 3o㤭uuXljuXljuXljuXljuXljuXljuXljuXljuXljuXljuXljuXljuXljuXljuXljuXljuXljuXljuXljuXljuXljuXljuXqf%qJM$A}4!Vi&<&hZnAtέ5Ųr[شy@tU'r.O7iP m2sй爎M|̠^oequu 7 `l,:xڃɹfRV7">F}`0s3dSӸys$:\op#>p&_ ΔLjo :2*:NEo>se(Pַ7~~z3x甯|׼Ƿlz~RκjYwouF_0 6k7^_υZ=ݚ%9olHq\-6}-Ш^oBQj Hs&LJl+5ʫ^- KkR!X#0sGqXeX,‡RO#bj=  ,v#v"^,'uEN>^/nBK$%kE"hPl-{EVDokf)ucuɼnid/ؔRD*OũP\>4ER.Ώ%e.y,Q{w-9j3Nf[Wyh2 BHD2c  deڥbNHC2ԋAuu5M|QAL%}u1zss^jǡQ #"G]\c#hiUX6KorA9SdD֏I&eXBަZL"s#0sGď*uB\\LuL{üиEq.?P$.JBʐLJ;EkZWEhF ݊J"ŗyǡC:,@ >֪x#kwhX}zU#_^GUi䣪P:HuYuYuYuYuYuYuYuYuYuYuYuYuYuYuYuYuYuYuYuYuYuYuYA7}stasA#s:.W0eBXzѡ4;p՝f{Cw0ݵΥ?=,ٖ_~"=n9f? nf OzxF4mqݺ:v {>o~Or o݆ WOzyiu1=]${6 Wn}/? 
#?r?܀ȑt~:Z|wOz=X_]N>]wb57tJۇ5ӧ|ͳ+7nٕ}OKϧ[dn|^{yJ*ΓstZs*ZVT:?zM-O ;(Yxbzji1eemOԾS{,P{ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @ @#PyiK\RεŪlנS)tza'ӳ k8mXI{ÞmG %aVÜ ovq>,9jLɀ5^ a{5fiMPkV.V0s/f)pu|FԎMrZ:U_-tm$S'66KcڛPBpk/M@sm(N D+Θ9='K)yٔh5uN#,% "XwGJ;+q^!J ~qbjNѮVtY>X]Y.KKrѡԡhaFSIq^"âKm於щI 5IV\F>#qKIw[Jkr{$nU_sK"Z`3D&*DbϹFCV j{nsKI 9d<mMGd;_ʾ| 5Iҝ3Ȝ 4S%iHi+K/Ѐfaa9$Δeڣr:A](>Y;xau>άK6%N/]thxW7.&_4ti*zz4u3s'⮊SŊs׎]ese9gަ[jv]VTW3utCܹ֨5ʜKew؊jJaho\.D9/ɥc"rk3hi 4xWef%AJugL3|t:WK vT'KnܓRQl.ơk+fbV7fJ>&. $y2\9XNxD 2$(!DO"`.+2F"oMpnޙ\Rd|ԖhI,IXw6t5AzRh9#/9/ܻDH%STQ(i*]CH@F.S)hlbKB/-JNjgZ a""1\ bNV ָj\ C]XwNĀQE!xs^h nQVփ>4)ro1CCε]Wo!Hz`=kA;܁FXY>G,fB>AST5%׆ oe99l[m.*"R Iyh W$@|mHZx9hCά,%TB%X5ޢm˘f ,c^ HtslP uYvp~$0d=!|D|FyR$g9AB' ?j^ጺ;s'@Sbȝ@}}qvQl('RxF ZFéД P`Tr REXc$56,F̉$eXF 6JyY^gź!\+Y#uwŧH7{|eTȠ&Ą(S&U8XfQp+Sp%!pLQoXId4lɤh`Gr9|8N(ȘcHoj3tuǎCgq #i͍' h! !Rޕ2cӋ])SL&,CU:ۮ/[LyGIro_Mƃzɵ"cD\4=Scr+ZY3S"6e/Oo/5Ja HgA?Eiv`X-B':;JN Yߺ3!,/86Cnw&MjkoB:p5i~7Qhsb7;^vXAVwd`o7fTx7A|#-q!N2.ĩaa\3P#E{i娑B_hsMڰ?f߯^2ݯ,h.v>M՟ To{Q=372Z)θ_5t|U˖W-l LߞnbJ!m8l# ?x}Ti&Na.=TF\:.e$@&@lub `zؽqlOgNk:0i 8V)XﺗY淜vh. >g;\$=X2D'R(~B8K{EtJTģP%F;iQZ8G^wvյ1d[Q'0x]A>=YۯAnt~T׫oðc1n|WV;|ol! sc+馡sgHYnEҀ^/t.DziY{lؖ3|Os2M y||14? mOF0NCZ$ qhY2."4_i^PvLgz+a&rG.@+U42j%P<B(hmg[h]bͷノr'OۃgJKkӮpVy<zK,8MfDa6q ֒5 ꅡ‰dIc9PpP\fhT< p ÙvG=9X7$C¼$r}!)%p2DtT0>*fQ${17#xbbnCQwF tN,٧H^)m)-* BuhY]4Dxa} Lʍw*ag׌w}fYV'.`ZYY r8`A+q1YO7pC%r3oxSa7'_5\v0o,AZilw2a< Pad$/Ɗ}jd aPN9?֊sþL̟^Oֳs .&Q@6R*nqYQ +T[?F.BЇ;us(=sxx0=;Ss8X.6 Σqbm'8}4s3 ]ufWWٵ6._3/'/ 3g`N'r{=KΑj8|1m/P{ glI-u5:QBv6,,oF?Q̳hxz5uté6xeou]v>qV's%Ϗqȷ@5JUVtVQ?ޔ57U(oۛ * OPCut^o^=2s/޼z3w8Sy ]A;Mϟд.47oAӂ|ـoҮ(ޅ5lnCfa16]`&oWlVi卌(kMk8}c0!)M"L Ƞ_c 4&7JDYH28av/CuNՓL6*)~ 2)"a\s%_m$I+q܆vL]&3Ad7cmdɣg0ULْ-YEZl6fW]]]ԲӨP=3 󘝝ыjUsnm%@ھ>ӛC r1L/޲:|xx9I,Uu>1@(W;g㕽,HZ]cÝVk8i{7}07Y2.;vց4-I{Ӵ:lje+dRpe0\(aI9l!0i %De*55TzqL 1H*xh,^'cEU!#z4La͙Brc.#$TS2'(yA68/H'Gr/~tpjb@F] sbAEP-|ԱM&WY32(q11N i,/lW) t Zo=Fek5EI-iK od(Θ>Jr tT堎`@Ig,XpcSc2Qo/*i*r 閍ev?;u -['^3v*ՁT*?qrnO},\1UII9|,4I $1&2ީNM< ^p5?7s).HAN[6LQ*fy"t!2% p^f.ՠj8D{7`ZLS 傩2,85`2bk^p Άnĥx:|1'/M@/rcǣgkk/E&re}a6HZMQT '$e.!+ g05Q`*P11~e:b81p ,$ :1 ޥNP&<Ⱦv&8!K2 )$w{M4i3k681o v4|dPݚPj-5n\iv[[KXU5cvS@mmBA[I_qV7(ϜM743&sH5؉I…^LQH@TXM3:ߞ-~ZvI8Zw#27L?=bhjG?t4̓d zof4ݑn:&ﺸ6>rЦwb {"lSQ*0Ly]VfX9#^;;_8`Ae:IlLׁDo>o;-e7EQ>z=a<8?sFe̻PJml|hzQh>:R9dc{9Wsުu/\@t9`HQs,D6+:e(=MruZso)XTUq,D ^\?ʆD7 :eF,;\4 hO+O@׸^S?nDnx&F Ÿ|?wpX-XPv^t/ GYQ!>JK˻sƻ^y<~sknr?Y֠eAXbPve \O|jHex?3&l=f*!feDY)pPl8y2k\. Ve=Cy)پo(_?P ޲F &I^w}ϋfz¼e% XVe*S=e\.>9mkkq׋B2J$A;M3mZ"|BYLi`8v+,O Ğ7PM[b5l4;wR0i&-20j%^s.V6U`1x0PG,j!=47*wmȠeu;dQ/l)hA/r~ʼn|횉uh]2李/08yy|&?2}b4 f{v;lvk~wo dhSwCY=0.rd +D0wE,,#?z+(…,\@E,3![5Bˢ'ח-&O#/>ˋ=ܻ?T -EzR\ш{wvߍ$xf:'ѫobbVȎ2=ܻiZwv4Fףf7fm]]*-+@6-th-ove0@GWHWZcn!\jw;t()]=v=?%KNW[V+LWۡ0tJ˛EW| ]=)S6պ-tE&6%#+P:dsGn<(ǹ;c/zUqȝ6'cB藟_ƣiQ@ =NOasA |WycVį^T̓i'N8]d+NIn!N9pma~)? 9ʥ&Qќq?@]LjBT2ο/`4uЅf] e 3_;QuqvSr9w vk5ʁvrIbm4ʠ§DždE>vp-iheMWeeSՎGUܐ'u]ߕ0%5tp48Nthh:]!J::FHEۣdeqF-$(!١E6D}}&SPYzO1T&S[3JC,1"t|9vjlګ,0 «?[JPhJnHEa$e^DOKWc>ʣx}zQ'Jhsgb22!.)@}1}ҶDVV։^DvW.z-i$q'>Z859,NeR^wFMtwݹØ}.zYfAhȠXÒ%`S?T h-•A񛷀ٙGhb+!mJi"h(]!ʦI ThDWZX۳pBn#+tΑm5 G2h`CtѠ++TEto9Ίhn:]!JχĆ]/N"~}+aj;:'ʦѕ؂DGWzm+9x\rBͧ+DeGWGHWpl#O˘B'=Y.YVOcWfJjF7%%M#MsM`WpM+m[ 6_DJutut%`V#$5tpi ]!ZCNWꎮpsIe5tpyk {.؝]!])5m]`۳ŀpEkAD{TۡlYGWOBWpDWضg ZBWVԬ#+cEtm_jGwtute H#92 \BW6tt|Jn@ϛ]mrq`mVC4X]ɎJՀkFZCW9\Be:zza,L= L[ZEh3N[CW4U4(uGHz.޾r 5=iA.ҳ J'N-5HטթDJ~E}oת I4mM04c0Sԫi M̮n]rpڪgY~׳M̂Hkےo1ʌ\d=䶒^^ IorSm!J@-Qg.=nb]ks/t|N| Wf}6bX[|Sz.&a {NJB0['Wso!T+W-~AyW6y掑χ?aW*OLy4PC/u`_(8cŷjc?ayw~+ U_T] ؇Qz_SC;&U05!2 Q{ <^c:˨X*ɭ5g~ў̛p7|S\%s-|X:6D旇`e|*Zd`[>|ʡ&+h-? 
9>xp`}pyX~*,Xo*Vo-DI1ao+o,FQѪºf%bA=_sJ^xJήwOج3lt6]z=Eud L/ rT y,IET~;Y(ݡ9}3<AE7NR7F9$ѧ(v:/.Ơy8>]oTe^E/zTt>^NzggSӞՂAEEt4dMjmWaϮ~2$Wð}`!hY^it>0opZW|e} Obb&~hO=^ՓDF b5)ЉC{3ۡԤM9I8'eqGAegkDžLj?{FŸnԏ#l.m}ʝaT,%!)jS=3HCQТm$4kj.TKB+.W( NIv:locҎ?Uӗ!]S{зٿ"Eɗ;zst>V|0Vcw \0-*or"zE&#Lj[ڀh9[w*]D,0#zyp,m8{wofW?{,cdM#MB*1-nWq8}MEXڇo@N GCb`d1'1RUE4$#deB$=%{FO(c8ą`zYIETI$34st,Bϲ9J9II d39F}RzJh* ]ed^c٠䆱T9<")6 Bv!&Ow<]p%źe oQf| k2bv"E*rީG踨[jƨIƎI"9s@ ^ol=H!j,YR&;¶B ]$w/rEdGeʞ3ޖJs5s!V}{jxDh&u[DώEzc},|]8ML\ k !8 &m5>hld)e&VIJ$Ua<;CM: ͬրW~eڃIU7&ZܔHEB)䅒M4Hy(<*e= R湇iP%\9`"kנD_<ʊ;W>=+o_>%l) $ƒmV> j|ɔ&kP13mrLдGK#' ޹HtD0) z z#qhP!QRMZuR+[䛀SsJ]dQ8u4n +jfjȣElRQ8 #ལZq(c͍r=ĵUXAZJK`[r'0tdi;y/lQFn!?[?P_8X9-)L)&0ńY+nS*#HC&(5[Ơ Ȩ9 Kn}!BF ti ZLf3s=q{c&owQ:&l*B>~3\4?NvۃsB<'t>1~||=WoTP L+CrX)+%01M. P'pPxOQ D+ȎxZGE \@$7H &g)]@:&ύ HR g9;|Gt$ŕ[!pCCy\Gܢ;i麗Vm[^jt>۪>c}G먜h1pqL )`>5ha"d _RVxQ<2TO9i&I$4,˞i.+&Џ؈Iy \pUà N'5Ǘ7Jjc({II`]p+"xVXdR'.?I}oRU=uZtPcD=  R2O 'ԇ/6ڼgd6+W,wQWe}\Jf͋VPPe1sqROW6OkS{xSƛ6A/1M)&Qt?1|ӻZVȕm;s ϧќ`yO8kgT0[.rޮu۞06 xUѰ=CiV9B"z7c߽>Z04pqO9" TvQЮ}/G)J%)moOc 2yW#ʚ#m? Nw2wXy[up l"4h3" t=a]}B/Sa*++ٰ3}݋7G.ĹMތ-S 7svV]:H=S=@?(Rzq4qD\#  N@gꃀ <0{sBsc5ylTm^o>%o4}WvIN3i˟ɿŶd_?lqcgw3RMQiSLSy-3#wy`Zr5v EYgo:5yzý&m 6mT'kn<푇QO,bְՏ(nmhᦷ֏x2BYq }D-NQ}2-,g~8nxFNj>:YVx Βb C6Ck4'BxIu_Ӱ]o}_}c\ Z-$vUW733K7~+?7bH+]w"88?I?%m]=?\a>ߟG-R(<*5Ҝ䢦L'DT䬶k,٫z݂8Pt H{$m6fzmB$պa|ͥoؠyE{m޴`1Q&3fYYy>Lj=,\]>?77t•};lu0%IYo4eg}NW Y"șu }/[ui[*7}F7[csW)zr͢4xP勬=ŏ~ᇣ. %g$vMmf6 m,^~K) nM郏+FFnrD'1; \[e%toH>E2]@͚ g#Çxv(=Ҁٵbɛ)B VB"#'!*q*Gogԅ4|yg(TEOXtpQzK7%"4h8T:I6h^&!>Vi'!)g8a3.8FKe8,]ٜYrU{oے!Wvqq^$7)Q*柅ZQj$ShզoN\HWwwj[%F7w~of~^~}J 'oN.߆|5pٌtckmVE/æg(E$  X`-#9=,bacɖԶ`<}$O^[`lݓ]$P?úLjѠ>fJѸ,3Kǣ/=yjCk*.*k늌SũcϼBl(TS}Vp4zgFtw}o?_s-87pM B{C^>Gxiţ%# Z_c MVys>1; )@R1p/]GYh5gѶ,М<[v4 Na^W *A\‡XS?t#AIǺRE&UGn#E I"Ӝ۸+,<=h,Wt] ?ьS-@^聯\ {w_}t>Q47D\/?:yq1i8kR-׊XquD{+DɶWZRZZ+x:uQiqןI]!XArRJs& Glح;v¥YɃ@+R,26\ŘeLe$< QΒ7d|1'v8M]6\M o.[/|qwB8-c,*PhRHRB1h ޹z{ c*צ^[v}dǃ-{˼Ǵd<ʨI&;T}EH+kTQ8gy!N;4"7Pmܻt?~v~⩈.:'^3@ʂE>ZD߽^^64bP+(+pWyQL?e3Q/PYQS}4ښ=ANIHq%mEN8H+Sez٪)h%F}MR)\֥ȲRdcm3de s\ὡhdn}MwYMnNUXr?Hy4 e|~X+'^FЇ,#.3{gU*4hԘ CƄ)f*ɢ ]Ўi2@㇠Ϝ LSƅ7a-Q$uΑlLVaF!d$&%VY Cܦ=pcp[2"\]Jg ݰ0l #jm6IE-q@NGq1E2DQ1D?FQ<ɍbf\X q 'f/kvYf(7mH؁~WRID25لjcptRxp&e)K[5Ov=׶XK_ᙨNmDO^ǥ]k@IE^]1qer<"c$!1c z@g \+elַ Ic\PN141[/HiEJ F8W2T9ܦzƎ\m w\H.\"*u.xM:/pu1]gTbbi>3cD/4>,iތ2ADQEf͆K5cE佐Ц&h6 f6 !efk7ۏGql k7;vem0k{ˑar$e`JXyްeiyk,h^8B(f /mIr +:dkb"kR$ApL%G$s>fY :<̇Qk/FlʈaF=#Ǔ4I8ׁXaTf}͜;D"Cא-rƴ7S 9a3\F"ѓ$Iơ C KZp‡3m5{5'^V&_gcdW^ yM=1ddCŁg[G42&1gͬQB>lwʇ0n|VJ"x/kez>sj2x1JSTG?,Ѳr 2J?RObio1[rW?'?י^Ő>)}OؓyLep=IU/?~ZĽat?f%b7]Ѹi& Yy@\]Z܂}ud߫Y|oAA>(2Dt蓫)o/xL%/_7͖KjnMN?>~[7DxflȤcT0Y0&Q-[wNo쉢w|Tv1nj)$zC $5d!0IJ>(P0iɬKn:*MZoy-bjϧ/9 ȍЦpKWERq`ߐI~?%]]uM(tyYHWVtb9nz_|sÅύGr07vP7u~LTۭ9/#{t7xziWˮǞ&-Un_^@<g <ֻJ@2q<9K.!>L7` \LU de*%{sr\>7-M;vM粬:p,-j3h<Ƈ4CQшExOH:bCJ7'q.jrVSs&,/[~->zy %|?.37o|nD+;&DM^+ V64 #u/?~yCuFιjʎepD֚(3JQEՖȉbk2ZkB~ ^FAba@eHYr1,\X-Hs$y:&&[~6CBu9B'o.'Ïs_Wl57tSb宯f׬S~>}8{ii*Xy "JRGv#5k٫`̔&(ghL{V$x&1BAkz_cp0 Q^cA|/NlZߦV+z@xhe)ryV&Q70`! .Hzţ- :bjQ0#Od[*nRٖ;'-wҼm)D$Uh&V c0"D)I%KiH*භ-_m/i ty'@ޙ+ۄ/ύyƣeL KT k*isO9RT e0[׬yyG.U0WOto6NͶtfR0+rVڿ֢3{x \#E=3==˻CWX) ]\m+tEhUm+B+({:]G6~MlA:0]mY{j;@j;EWz =]\0!*5 ]Fw ZNWRAOWHW^N+ >yWj_dnWWf5N8Q 0u `goLWK# j=y'@-nn;;CtAkMiP=M!MKlTeyg5+tUZv3z:B CtE \-2l;]7Dy uUJ ]ꪠԽ:Fi]``QW.]u*(]#]@.Һ3tU" ]Zx骠产#+T\.U݉7+p ]Z#Zo tute3tEp`A[OWЕydӛLt`<1\voְv(ete+ծM5kC4v ]Zw ZUAiuOWGHW)u 9Qx"kL!$VEoQo +vtllghZUYЪtA#iQj]Z3tU**hFpAi{#>+H ]]VUAiLOWGIWM^V/}0uǟ7=Ɨ.y=ErR8~G髃3Нӫaϗ? 
g)WT)Yԟa~Hn]BIܫك3ȔyHR즄"E&dO&M#jI,*'@x8 WVƍ IW+k}+?픱np.FxZ5 $5 (JVS{5Y٭79dٜ|,HԜ!3+ghSvqdb`ú,pQJv՛Ghbkc@U3tUZ"J#XPDFtu|t@`< Ew~,x՛G?k8:ԛVPcY,/fǖ]_Yλ)Kwr|rYUKxjTjEջoo}&Aw]i賂:_woV_R!{g,*zGhQHo?]muߎzB%__7|W<\}ҪF ZxH.؜FDzw0G4mC> 7ߜ}wG~vvǷj;}퓮vю_m7G_3/YnGLka.<]x,xh3Lh=/M!&dGIEvI0vʄ<5M:ȍz\A3ŕ ⯻Kq^sUZ? Dk7,:\ VO&0~kh+$\ ڏ Bq< Dnf j_̖MoNDk`}6MҞiM 2 p0\(WҙW+im5=ҥ ufS#A,G&۰~ LC~e^iQ1 Fik7a \@JF7 6ki*--:@\!+N+uM"kV Wћr~W?߽Oas!V 0%jSfg}ŋl)ʶ;590bMb׮-iCT@r@Sn3Pii(䷳C}w.ԿF^=A:J"kR{;֡Y5V~nSh Q޾{R:]^]#XFї=b )N\m*?o8A !}O'R|֥Ktw=|G?<4GGZ]o~{`|aT}xV({pQE x~Fl yQ?|F|?{_HDcFt8DAwgrPnIm?*oO~Su? [2z(IVC ilMHΪtFZwvE'K(/gws?K2, ׭nEo?NP 4~z/3=TH-졛15 0eI%-Ƒ;\:GS&U5fQ:kU:eذO9WS5{U]*=NMXre?/4n]|" 䞗 OWcO,$gԪ%Bj)L K"rбV4FDMFߵUS)rv>Q-6-M ruy ,զXꝹlS5$kNMSjJ'Zj"P31N#1;Q76{ptPQ-^rJꝼ ODF0S$3njuCuhQJ= zYkLY2d*CdtM(mBȥѫئ{y2;. oTh]#^-@_ ZWGоLn^>. >MacFgP2.Lfx_ȧ\ܞ%hUE{r'gj*uϬ90+Z[ur=꜌%x~j}a<,QsGZtjRhK }:~A?AϷ ҋJFl|]U.P렸6FUKe!Bb ![f8zm\&FUiX'Xl)6>]PdTW)鞺5sS f;Drn`]P;Yt7B FGhOM#6.5q;0QcXh7 L.HK3X\EGxSZú:ml(*wNV..*U@eluXB}+]ɛĢ#,Ni UK 徂Փk.km@0oTdٷbewa=g% ѡUhS%v5 EɷRz2lcQO.U!\ "UkRTL  \H$w)BȠ(׀)tNි2 VXcl&a0(Vnm vE}~e] c3J?g4 a8r]0ȂȄINq1q>"izo< Q,,8vPM;*s 0 P#_J)Pu>!KAT2Avl'J0 7P:3+G!@7Ǡ1nXd5O;YRR H`;/uZ~ BRFBUv2bU؈.Qru@Qn$k4̢r7`!Q)9o,=_6ZDF fblFYk\D d@{aD+O kE~B\FФFyfpaPer^C-fĥ*ʌYIƘByUڡ;iDL06'm>i͓\6+Ղ^ʻ@!ZIh|TxK0upf(Ow;"J`#eKW=$ *Ye ,c*R09 ~XHhbBgąs՜`  ĘV #)eH .nKm[|OCH}l,kwkT<128xGcb:Yة׏Qߨ"bU#mC6YK!eD!v*"}XaOw7>COE:X6OXk|l`>țˠi$!*KtȻK_>ӳe?V`0I#b蝎anK)!%lKRJcEX`ȃk^FWXPv7%1m!N=#' ˽eg;V`М)@D&e#kvq%so tv Hd4,?=(F;UÄڰ*)᠛FeR]aA2J'J<s~ZfHwZ&&)@2xHmRfVUrDm(2ƿ *54,/^q6Q*)0ĵU 9YG;X綞y~M ܜ@7duM{`$IQ-Uaݸi£'eKaU<%{lw|wR%3Y:J ՚k)DmzIHΓƐՓGCmׄ fL]$|ڌؓUAN#`rX)P/OH7"zh8g$Sb+괶` ! )ɔ1J3gzD,#'0 ]aP?{֑dO;X_b]K 5MrHJ3ߪ˧m+ת1&y/էt[a^0r\\p$r Ʒ0wl/Э aN;B -J1<)U,JB2⓪лg=k TX@ h\!36,gbNiuKkL0r#S#5܁:P!S85B$LA &}*@$\5%i\4-kÎנP#7tBe_j`wЪ8 -X0mE @+}!F)a:|' yS@ S:(`XbJ8CO*p+ _4 `~V;M>rIVŪRD`.cd`VB4W$x I a7tŒ]O p9-`Ҁ56Y~5 wW$x;g{jLDGuō| H$kYTU 2`H&?}`6`~+@aYY?|8s󡈳Gb`w6Nz 12I;3?x5;[T~"K3e4?\tIbVe\P|>]^\pa_2/=X`Zz4ζ)A}VGGmf{| 궼HS݄u4?ksoK -8F7Sy!I 4=RF ړWސ* ?aI DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@zJ Ǿ=Un-h}QtOT!%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R*Κ+`#%TjKrJ VT@Q q$%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R*6@XӡHko|@r%6BW&I @VCJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%3R}Z/Sb\L;g4 }.Ÿ́p B yWY-#s.}5=+0ع,qYn&Zspe;Ѝk0=WDM_ Ԛ•w1#B}+0 Bk?uB+#nJޱKs^{igzCwgy"2x?+\Ic{L=\+T_ U Ԝ•|{jke4淳y]t ೗g?lmܶfˍYeRKrПl:/^КCwty^.0+gM` \Fk8uF+$~0-UOpDhe}+S+R3W #mn?Q㏭NƘ8G^KhE`kY?GM*1x6tUһ x !E:He\C| ir5ܘiG u 6_YF._<4J΋ٻd[f) ~$YŤ}He譻 :1'l"1pZ@ *пnBò8xf)Qμ} O@#m'95 |uMP"r+k*3T4̧a}BǞ?wuy}[ `dx<2xՌB߽ =ڸAs Fko)#[D6L^ \Whc/>J !\Yf=+lt_ =,>QZs+< `To սI:FkɧM>[9W<0+'2dB\)WhVWh!ij+dt8~.&a:Cxի0/&<:4l|3j6uݞok{&J#Y͍6jm/V<9 +ӧQx<9r9d_m|^ы+:kBpXr~O7*s^Ѡ[!YkvbӯE[<l V_ &uճ&j8Ww}Ͱ9zקeJ49٨bC hoۛqkw?J>|-75t=\oo̾5R_v\|8ŷ/m-n5]~uvbŅLURt .#dٳc>+5d/Ol/]tm٪}h-F<6KھyϗspG[CD>a+Ywg_mz=Ad`t?Vߌoo{5Dr.vViośxd5pn)j8I31~ՈۏWEy JAk^Jqv«UvRZuצni-ein HwDrנ:¾ۻ+tGS}`ĻO*aYFChYPFLx@uk+9/ЖJ1 \tܗ˘Ss(#*e< ^GuvfB)P`ld?tLX?^ýxɍ{Ʌyomw0eѕ D$Xb$0̡)ɭfo.X);iE0(Nj7v8P;3@-oma)q0 OMO>=p|ٷ P+謹l\KA_b6m^'òɋ;2ʹ7@L94vA BݷO " ʵo/}_SY@Fr`cJJ$F8t'0scVn׶Cd̜]vGboXxapmm~&-&hm`٧YgB`6[mI[ 0@Kqnw ՙCr & ] 5l{ԺpL+6eJTę#$8I ee\%<3cD̜È@WGY\ٙc\<1E9%q+8Q蜕*9=cagq,Pake--oȃ\-xX<{K"o@ޓ]zŎEEx$8~ܗ/Q@= 2X˧>~'^~ֺ'dJN uA"A]Z@7AV6a*f(+yfe/>)+([.R:kx22vC8HC`@g}0q &=кb(+J>}[ݖT{9(z9 ߽6F5m6Nk&ywǹtnvx{МفB_l 7xsqb5W%o^>\ܡ7tI c:NGp[~t1xS<<拤fZV 0jb]+:351 r6'mN)y1'bqg;g;_^L*-J*5R.EͳGn)jam`Vҁ-5?mn٩qˣZ쑸Qt-fF.yee|NJSj=q[j~< -nv ތnN_d“wv2X42)lǴ.r/5KiT5C[UI9+[?8WNxчB@ `Dn#HI9rcAFE_mh.z-nx'gxB}Ȯ.x>۽qweqI2X#" 
,gd0,#yD=$/odɣYM6`+8$du%k9ȾԌ,DEf)OړMg\c,'Dk⑦30 Zd$xכ ]ugstz>:/kvp*7:ܠ'2GǶۛ[[OobE@"8]C:Ȑ 5dy +% m[*%Spȅ`ZUI!xmdZB9Cp1$xN?u֝ 2!H);)<(>IE#WBsMWU%,eKT*IB`_P4bnAV1M땱Ee9' S6ӥRCYaM>;g;hzyoj|ƞv9s"D"Gx4JG`#ZњƄdړylxl8vkߴCmhxoqqr&Y*:rA*jkEJu}7휣퐣&@j Ɛᕼc*5=#a ;ގf&XR+ :ꀔH`GV#E29hκAR;ّ~dl=0v3ޙї7(ů2ddiY_ /{I}4\A0 S`sϹq"qpgȆKWo<Ѩd% &+) z\sGqEDϲю r %sa"p¢1I"u@ɼYgِYCnOkimdGNT0fcR9)ZI4'ZDT^c9ڳ@,\Po YYPfB9yY='$AdOJ=VgP+NOwz;i`;(^((z$Hc L`-Az#A̘ ({W^J%h➕}w!x5]jeR,YI+C~U,4-!kiR,*Z-o#B}c]BOv:#26B0 3H QBtL,&pkBdlbYeoUY){,XKE{;]o z1)5r H<4@ğ,`' 6`:6Y,2 bIhk+ȵLդ |4( I>i 17I~fo)#BfR0 $ƒnV! vlĮ l)3,l;vP"'4-<2:qϳ h@o18I$Lr*p:vunhwU RNo)ٳ(?ã[ҕ,ܙ,w\#&C]ʱGGOrLl Nqޘ]WI-\}NQ(Ie$0yDꠂP"DQCPR{hSj^y)Ub(99TJXm'&ivCr%W2M;2 s LZI笑9B6捺(*ZV_]ߎZUC'|)* jtjJtBƋWUߦInd{Ѷ}x2}JݎS q޻Oh4 -XJ=mXJ]FbٜoeEAJR_EJK!Uɽ,oe E;2t+}5?R(Fa9dFC%OGT 軋wD羖gu)xCVh'gMȿ3"aZDʡR. KZs1zca&6Y"w,˲HbEtQB! ΋>8I- h^3By+/L93Rt1BZfjQKN D a(>I=n(&8㒉swd\=?Y87T4=^\kguv]sx~:0N֜\v x6ҭv9R Gß.-rrOqkkɹ֖@Y[3 ubyOLp8j`ŢG/ N+ڪ`d[ceT_GX>}1j"6F3?Q|TM>`8]1_Hw?!=| i L13b]p¯|\kooMWMS{ I[ڽ>܊e1;K0.>YEfǣl,| *-^n͎ &U_J,^ųԈ+> Rf%O"~(;P_R"[hl$[/]i#J4'(5ϐ\6`U4%Ϝ^EM0d VCFĬ 6c;"o 3cvlctD̘KB D>X+tG=Ntv*Ctg4B2DggeX] zYW2v&lgr8? ]"G eVK)C k0Bx1y.gq=%38}U{iw.Қ"}]H뉐U!K?B` IrLR^A _k΄Щԃ>ӦY.NYpᖫ,u+@Sƨe< %6ȘRǔ`fbSʀB+/|fnW)J=pdκsCdx[N!>x3F(š$j7=8lR8Rh=ʼn8Su{d6V3o-2!$tV1o#fGKJx$Fy;a9z{!iZr?rNb/ %{ɼ7΂6OIoPFM/B=vQ2ĩ1: 5HjgZ:lBWinm|oO<3R= 9~'7VhQ 'r#D&P+n1O*hbDx۰ݭ%R||uF)Aݙ9!z<.ɠ_z-^wo0V m2Yɀ)+xK&62M @3!l/# -_M ^0<`&YDv, $ji%N>zѾ pLAhKR'ɞ0KMIzecTu/KqBH>o>RXz̷v[zDwylI9 㘊%lj2^XJ&&RD^!aEzӍǩ=M2>.&O]o?9NLZw/I$hwREШ?M~v<}e>;IPGVc+/Q t=<.{:;[Lzyk_.׼{_ W D>_Ncc>X?GЗՔ:+Ur*Wg~vuŧA&W=cϧe-mC. 7_ wCOg'b?иkܓ/'74`7=Iǥ\nxP/y;jab\ h@-fbNbwԍiΉ<=/fNh$t&zh=u&Wak^']1NS{&|O+iWu67 ,o}p1EΗlb%Iz.n^S=f "jxFǮe UVtEV%V kCFWѳĢsBKRQ6z.8/ҦvS-7x]@<%@FF1`K+*cVYQJmВdʘL1% ^W80fPīƎSj"X^W y7֛q}P*ַ3NgʬR^fwe~^f~a>M]9nwJ!2_OnL5}]<`ˌ˯gqZן|Ukdӂ䢥L.ZIEr.ȹLmZˤw?t)tM>(H ZMHnlœ%_V[t -PnXm\>ׇ/C?@c̯VZ,wj;||? &SqޕIȤʾQ޺jlS/7xA敠.fNKFMڿU|KZ^U3g}'JYXr-Jӏ?m9c?#şAd(MSнJGF mrN5xjX`(Xb?)>էNNԯ Di k’^jnf34RaɂRbN= /qǧ;@}}zљԧ_ >au"\J  u-Sc!G¸d1H([2I4}{7U u(Tu) 1%΄ ݞ[SJ ~jd2 lIC74gqw@{M?5GZcK]@Eob>D H0Nr_F:Q+F:Q87F"-.kh%xvvpxtX3SZ&6Q%Vr]C|cPac@h$.PZ~ppֲla-;JmDٱ,rR\:"lLjKʉc> Hq}{>I˲4lsOv!6|q+fG1ϣף',1\ ij "*DD8C d֜`9dIsLdV$3BpLB@ґP\e Ff g9>uuvvm)Z=v{&GJ˒VX, ^&l8-b#DH(J~q#,*[ɢ;gOxv2>["&rBxIe^mRI0}PIQbcz *SڕJR&1+UM\O ": D(ݚ֗z7E d@f;WN[Iܢ=/6_zU]/PY//qg%DdCy lL FWs|`ywfyj;g$x$ᒁ@ƣcb{cɱjUZaq2 v hp4mi : (h9x&RLBtA̐Qޕ:cPĎ⸮[,Cxe:1e手 pZw>ʥ{K0 \H="4>IMɽEȮWR#_UjƊcpeP` R+m<7Ch"K)Cv&)n2Q<%kb(QC&o z>/ kp']S!M; (im@ğ`g 61EVrp@ƫPVCp5AR*7(Š5^+Pi1f1sKN3{п+Okd{ɡ@+ɜDCk3,L"J-rK,ǩ{l6&y`GcM;O} HtD hsy+Cjlv7Ґb|u (tO;Rݽ |jmpi[ nUmc*Apk۵Y=:RLZ-[+$ȉMT`m̈CEL!͖җ0c>~8OūczS*K{` d?#m?=*1I+[{!DaR$*&9,Qq Z2^{KUt[}s^(Mj\5Nt4,h\Y3ӚSEz?\6Vwo:=~97;2} G9a~xq 4=֟;oC5 $;{s}N!o.{vk%lxk[Gx[#{&ǭeX* |sRn!db͍+I$Y|͂17n(= qPp^yc#`Ic>p#u&8gxN6:Y`(QxMwڡh;P*v~MGogWp J&ô)dpL }J;=oX!) 
R,h}t\ۈp1˒DR!-s`!%vN< ˮJ&t&m'ī_:m.O؅@sH|'Òd*pFhRHdCIZqt v필vO = CVC[kwѾy&Fq_R%"Xrx1ݖ\t["j5CDT"T Ƚc;ҊpΘp—B1'5fqvzR#[}E,6A5 Y{Gˏ_R1aߗ}_A^B"ǘbo+m'x_[F}.L rfPՂTof !kkhqsd,F7Z*|ixhJ3~|ZUt~1׾^;C{N":4Bs҃R LVIY$" qRc{ήd( id +89ẢOq~9nJrg]4E!ݢL92i`)dpɂp Io,)3dj& 5\ f fT oG1HFȖє:*#htBB,q J1H(6 @(ʔ~G[4v(F."":t7xx`1;pZ@1iEbĀLĐu{}FSGGqT=g<(,'Dp{Y:&2a3M ;RA Mt }KLM̤̿̽!bbfXji%K(A ҰR>oRfN)OR>\{qT]*Gս(%G0VTS]dp.V)(9BdسRy.K낷 C~=9ڹsN#''ߝhlK9n,jTZ*6h<)P#)4!cFY; ZDl HVF[ :aFhNP"n6r1dqO=M\^u\1_} j2V뎠`Õx:Jp֩ uQi",XǑW@2D֛֔p9|cAlH(MPd8_Zk0R$rH'yfR*n9?@9O~\/hޢXO~Ooq z;Bw;.%>o^d- 1KD&p TQҭDh:vvzx9GT?7o^/_O/`蔘Ky0R=۹Ñb8x=O_M" 1Vʖ_ff+g!E*j|5a.>^t]\&k:'8^*A[:Օ}UiU:TI s`.U}]b'?VJؚJ?uW8:{W/ϷWg_=Dgo_=/0RN$;M{4-s5 uMSŶh᜽n|vY.hLۭفn]Cݠrsol W}`k U cJ#68R$#dd.s1$6:^%,r1=b:RsqTm*>a1ri;r\߆AQ{{_5ca84\!$z9-aP۔愵fVWK6黛Az$I1gmLݳ4g Nױ qk RݢM GgGs*'xTo# .GLP{%oJqxT}}Q+~āQIьJa]Ԕn)xYzX g*w>O-p6ѷpj}U'9 -[mEluf_ tXvqx7}a i>iSzc+O7o]Duo&z<|gr=V Bŋ^Mү{)Wi]gǣtcn;^P咾Et JFT(iv%ҕ#YRF8aA)܏|H^Tbו/@?LHє0ZYIx\w &sTsUF 9[L}2(:o;c PD佖豉hj@YN!9[ޕ>ZꉦO_Gp_gɽc")+Xr$`I`E%1 d`!I}V@.p@8H$*Ű`2/ll^}R1haRRMFW7}'= ;oj'MF[xrR+mUnQRT!:^R{#+*o6c QsPFBr#;"hK 8d< n=CQ(Gk6#'Q-I˫!-FjRcr#c6r6#c>Y%qD. Ci/4:I+"7/ LP#D i [.Tjc`M"j896s=cL,Yѵ3ɮ6v޾|2ȵU)ag 2IZ_u bV&,t eSz}0jC2"Ѭ+%vީyVxs孫5ϕ\hM}Af}ކ2 YXm*n34L{>.5񻦬u+mBr̗Tr~Ț(*{tGͥB-dMdp5IK*Kb3^rÜ%Q [Z: BlĪlIޥY~HE!(MbҰ@5 Z5*D5Eɼܔsʍ"VF /"( -J)rRKOF 29('O Wvim 8j iz Xx?[ܱ#lP2X <78`Rab$H=L"WC&QDc$*>Đ cH@yBpktB^%r)~*ppW_!\)pVz*pŽTHW_!\i W@0&U"=*Q+z/]%*%9W:QuWu~qeUbKxY|nΟI[gGù3Xև],`aG(EK%-QĦHO򂞐ch|S,~hg;jSYoi_oN"wnemӃbSpi0V1Hc#uP-tR+,DJΟ~}Guhlooj: 򮺌^듻s`8nܾ=ewem$I4>binÈl%J#JrKH*iRz%UQq|K[qW1m%k'./~.v|hϠe*^'J*RDv/~ٛK#g݈'R}~<Vx2rvu=ү?7nH{7?6nF7ڎS=EP;3g(s"2 5JYXmmp`c5ߐЀ):]Bx9jdbkfl>JuhґYa{ )z~v3$^>}j`$k,"#V$&ſZiZI6Q5l)F((Def1ޜvvjYmx^"ۛL>lgҤ<1ٓNfӊMJ#2E՘ߓD!m& Ɋm U@y岵M:cB<ԍ8u[95[΅C(JX+ KBI <ڜX( &ފ~/ 1s)W)`Dj_lV=Ϙ\]B&1@fP+ޜϲ.<`)bW#8SRrʹfЄ12Č!qձ5Ӊyik7ދ x0.KP"2"-ЌRWZ!"6[:oq4MtȮߜgY\6>򀃳k s. rCE) <`5Ӑc?Tv\k~$> IzTLD݄*ZV?6q\>O/IMnN~Σ>̯Rdޝ~_w&=~:6}]~G5 x{tv'`~N:P?&|g#o=bϻ۴!Zd*+3oږ~zzzvu o7K*,!a oƑ26zv؛l~faoY=TGSLQ| [LE"',Q@|Ӭ^')VqOɐ^.&)ʤfE:p`P%d^3qq]hB'PXkѭPVX"pʃ!*G!*^D<>k@%v5Qn 6vL6x̅'Dl:F" &k`g]166:2`#/"?mi칊>UkAC:@@vK仇b`N`oJNZTa`NJ [0[{ǐtFFxQ?c?U  X_D7T:%Cqʛ*NYUŷ^}Jz e*{$T*SQ7PҶ7jFJe`;L 41!A9}hww?C f9a\e]Њ:#[5:~)Z HQ6n/LQlJ'1+9WTVN.ewTm"ML l."T'&, -VIQ nDȩHjq3qϸOOjt4F niNp|4V_~V5r$Ȳ)E3R9e 嘋gef@BF6tU8{T),| {y\A+ӛ'09F  $V0\_`w2UZ[.:W00s^RyKy.8,rCٴ=ď0Rz=tTT !&$_"ޠUR*|*i_/گ*uHFy#圢PQ(RbP\fL| imaf:EP@.ZPz Q*Y$L8L otL{A,OYn/rvʈtN~Q#$?y!q{9jֱACVaPhkk:QQdpV@Y: P a?1hFU䌮 E$ MLprO`2(r4)G|:\;r.|<J#jN.?>%^w֕_x7'FON^鏎Ҵλ8:;{0/iu&~d^QڋѴx|R++IZ+\ОЍ<"/3%:mK6-tV7Uw־ߣUsfv:(/ыY׹M})oċ3˪\TYUk8?} >]p?rC?yIaզ|BjjgBkiJ]~'/?iFv̅fNfg~Dž0{bM_feX( 菅MҌxk}S7=0oQsF{> l/ǚȰIm\mjol7ZE?tDy& ]ըu7MZ+3 lB6UcV1֫_1||Z9-eŌyy"̲˻&H4v-u_O9{ҹ\]@)ى{N'Of|=/hv꽠|b|NxgW~o"N8U248yŸv3F5R w^(ddFK>4L:;g ^W50OdޯYƵnd\ƫ˻mp^l޵81. 0gRhJ"%, [/G}߭ښ Lj&FύO P.1J~" 3pTZwZEPTZF]F{R@J:ay+P"5RRJ$ eRq$`683kute}q\Y &u87`BSB=aaq9,~y,;΃'kqЊwj'sq/m]p[/>A0J('l쟂roGx.nGc”ͻ \UGs\l$9<'7_nqzGO{Ա?Sy:ؐhWܫ|񶸲JTv4WIÙSK5(8X*MN5.Q90vnqx8ӧtQϦ5X~߼^5PDs|J̥맋Y=v8R|o:$YԒ1 [.56\bsO:BM@fm.{%S.lN:Յ}ՄilT K\> }zA*8VR&TpMZWׯ ߻:'G'?ySɿON('=>58Gu.$lM[MW4 m5MZѴmumvyAهj6m=?r+s~x|vr:-]v_"LdWRΧALCYTP>T,Jxo-d]2>1+.Hic##DQTŔIil^QetZ$a66LD^YqhpI{opuDP#DzG\`ɤКqJnNp4ӝrSz[[륪{M}lz\sفsy!I4N`U =&>{ zunWMz^hE>v:Px9nG''G# #W3wo6T=sLU+jg&x*?hU*Os|ܷ1muY\ގ?A˫=M ~Gշ+?l|aOLmQQ-|ûu]? 
^zFM[DcpM/Q /{|aU/2ۻgQi/P%._dF%ӚW:XIv#ޔ\-Io?ӷuϞ"G #&EDeX\ ȸKRDNF ꁴޣ znl $I 1IPDRBHψ1P@xj;VSkrwsav ]zۿ1$=X7RJ{;kdRiA1+H})|iqE^Yi?Osd>NF8]gK 6Bw |(.3XG_gwFD88aӤ= BCC$8cӚRr$xi 1@[9YMŜ=U;J1Ye 2jiHtƶ /)ĕA@DA0B3CQ$2gJZ<(8#1xΒ-Fj&j]MF9sǜN``!Za1,ѿ)'aF4U.fπ-v}!ʅKv A5!ٹ"s>gtʸYÐRHK#, B b"eCOt ;~vvv#[TᤴT:.BPB#"A"GS pK#JQ }fkQU?j)v hG'j˺:Q>:TL|q#<ɃyRCY&P907&-}5 {zo.rO v;d90nG!8߽يZMqlG%YD ep,RQW|VwșG4Z{(_o+mD碭{Cwekt|zjI|7U *HYG/QW~ؼXC40ۢ_ZbwEmWn\`]f!N \z8%j8A<,/{x97$`W\Aq ԤW?qwQ մ#ne+{l%y8PYQX$bЏyu7& n"y!My:zzzϏ=lfJoE9^]E^_Xi_a~0a3iS5 _уY al$҈OkS,ݱ̫bt޻OI{ jw*Z_߽9ړ)_TRxbJ')9>sV,fY^SLg;%$Rr])Q!ަj}ڙ] N';s4JR^0aVی˘47[u{Ƒ!#!8g&Y쇬 bL rW=3|I4'@dӜU: ` &(?hn#i_$C+uZb؉㝋ZoӁbl4mD6`USCL9XhQF`*;(b=$Mj΄B8穲{6Py8š8҈stV.i/4r2>jef^0QKs*$V`PAH ZgV#BqFbͰS1LT؊^m󻺋 HitL*"Q* J3BDI$'}KaJ+Â,;A`Z"Z5\`s!]1`K""JzgQ{CriF&/1Uy ??'ofӫꃷCb0xstA̵?ǫrn^Jp$?͒~| 췘AfumOo麮Rۍ(O7v,@h} 3f1n>ѓetǵ zJ^'BH0SIF1Kjp6*({/%lK?˗o߿K??׏|%& 8Y- hq9xGײyTfQWK!P|"~ g[n!@^d?|3'eك9Nxqe#Yg~ zWTp*P,r׾P!\B9OU8 \[ukb/ j}$\ 82=QYŝ Ti LFT3R:驃 sW%90wI @"Mcy)E!=,58hݝ4r3 M[ 4m+DvQd9txY;~{ 56_c4k}?!lR>m;<~wLU6Xoiz?)QRȚc NxEfHD+̽4Pd RD4:0<$)68t#РǣAiyPKzteF盐O_"+ |GV#$A+"BB m D;mDx+kݩU)jpW<7 .un֬5f}7pOp ^C|A{4/7@q6~4lxٷ'y5[igR=I%)WjkucaG!(Es%eT!t:Q 4h6 *ߴ~?-0Ro"3'knBGCXȌ8 Pz'[lJm>âsf:o~UfEƧ;@J48ՇFI79b8Vi(aF7TaF@N#F#$^ظS bx@W 3애*koa0/ h@'DK OjߝM6r69޶H0" ߩ9v ^m_SqPO󪃗V3kRCF%ΐJN"]PPV]h[7FsHEm .(eIKl(lo@FC\"1\jJ#5KJB;A . aQC2A;׈KǼUk5 nV=PkvauU+KTWkA-9J !A^\<_]ɼ,/zA.W`N\EO_xU| *ՊHM^>yo&fG㻣\G ~>qEEh~ē;8JB+a rQP&RIK{7BΊ-{- ^x >HZShBxIB@mO9ɋin~ ϖQAa~2ysQׅ/?R;/iV&wpR*s {Yz^:_oeh6^GT2*lkgہK0; "9GH {+en]VrV`vNcav[a-9hNws-Pr(Ĩ('= E24q`9NQL4z:=V)n[EĖ#h0CJf/hl#o]tzMH|:{+W"rO/ڸN̞|F]}{݀)1#Re:Yk}ᶫaaD/b0[#6-rəs4-憳XDf+ǧ>=dRtpUuQZH?/_~2<ÑGmCN}\B:yw'm6= >Zۉ`8yM>jb w M-JĨ6xΨjgo7k-L;.o]Jcv ~A۹0`m܅Jzܫ{GǺ2ș-9Q(R87 hM-) },Ʋ]vOHKԚx=(dI(`?hdT#* QGQo H0 gNrJ #ap) %RR{q%Pk`0Ze%LZGPܙJy-o,h 58JH^)ga"D6{Wv3gG$..V7Suv%Eas\ܺHQ' " -UX+Cr)!dUY괒߽ AD Rsũnxh;C{X<|CVn#wf2jXkŲ`\:Տs\}-BTrMf ΣA,epjeݱg*s<Ȗe @ug4eU%bQ|xdI2egN嗅9v]N;N?u?~oVؐ` ַs;z=:~#]vn3C~u#-[}[g1Wa\|lj$ŗ1]Uoiq74W:t!%'Oՙ9y;mɠߔ\,nUZsIU>3(7; CpViqO9NcbmvĢpP7bAqdDs.)$B"X: -u14MЗ! /{K^ʰW{>fZ,7$ ~ ^\z3b3nʵ1;Kvn_Vc>ۼw+e܄k/Րk" rƣpgHpRT*qʖM^!jOWק_F\Wϟ(9 6)Xf&UnHƸ!Sլzfn~g?n ugoֿmkOsͼbNN" UZ'uȥpue *<^h2%Kh[<dAj/-<莽+rKcRge;LFkXP1*T _6@P:sKWg#\Ic)zCo&'l:;C|9zs,dTiIStY DJZcrA b_zV,C,} 'ƻhG$qKS1.fn'C]]սgnޱ)L|k]|P|eXYiթ!glۋNݹ?6?ܮ(sF0`SC ^\Wj75 }@/fsQMgCJ(LMX}ʚ H`I9 K5SY!`Sn (CAU-D,3)ޙB.r1"H/shܽDWy2kg,|cu)YY $a.VHIVxOUL}?2{ʄ6!ܒkSE~}C'۞܄ZfpK2hNR: gӁfZZ'1 œM :@yN=i_\/L(Dp Rڢ52BcxTjQ9Uʖ=!uh skonpykX* 4lE!O]͜=K%NrZy5TnBqgq|DmR.@ 蜃 Ib%3[k)+Q"XsyygP%ىft@£h nFƛ^1ΊQ(_jd[WTZ 1bPs6ՑUTĠb~͜=l}n6mU>JNQ{ JIŬ%Z=Rɓ˔LX 5 t/e/qǿ*&6p %U%`Z l@&˜RG5CnP+ߜO)fݱe@ mRL()c#[.gu8gW|X1q}]q_>t[ XCq*("eV)FyP[=`d)p) xz3>_жȃ^L C:;Je6dy``sE STsu5[MYU(ev~qƆ)v!Shkeao׋ ifmFG>3Qh4n#Z 6I/9! o`f|xyRٳ!+*; #=f#􆔴`(i5s+JZZSWjVR>Q0P̈́f o%\5kQO=\5+•^ W\to%\^f9\}pDVq{f/^GWzц|8Sşr_,+yiS[_W-G?W>բxU`kyV$+|+AsןuߎsRzMyvZ]~ỌOGkkۿd U YWZw%BU!eF3RL>Ym;##kKՠ1'%J~{&ޗٝGͳǏ_//-rteGF-I<>Lĸ4eWzjﺯŧ'D;,1rC0GnȡXoU`mҼxLosP I2ZX+YZbQِ zÉ,L('-MIMԵ pNBJ\Tu.1HjeukMJ(?=*%]J" BV^j14ҥM!,C4o/ka}|zw}4wS i4l wXVM\Ti 83eb3~?i'?iB&?>ŕckVC4{m TT`|b*Ytz?>$闥/{Swa~.C|sr{^'&hkt ʺR6Vg2BDXk 4T=sŧS6!<UBԑFrjbT9jlrHf{gv7sv{8Mݴc7_=vSr +1Kܰ8]L%4n -iT *=sKr˥Aw외APjn"tD,hac`&H)q6:X&W :g6` j0r 486.@*39Hԅ\h 1RHQGଶq -eLpeh (M\JvrDBy;q}&@HT`S,$e9UDH~W&:.)wZ1|򪵱ck MGRTìu2r`A]2wHSyk+-[ƎƆڱ"?mM'77\ jӁJjO֭f $RDžFN,T,r%mM ܾ:-FdK:ˈA-ɵIEq@LjIk? 
wt{Pˏ{ǔ7mtX$}ĎEg"rmcuD rj"p+GJ1c]L'v- O6tޫ5DGʃ H 28kG0B*x B.s2.(v3U>/zt,WLEv5>]S!&0_8hzpP>{Gr>1du$Z(gs0Vcbrl񦒠7z;u - nT%x/$dV؄/[S9Ʈ%9 Uj]Qj`+'A?kISLy!٩xfOd{`%AhLҸ6 v%cɘDiB"h.4ךROmZ 15gbÅ :#mzv [|E J|A`R54rg6x`O1l횒Kymժ(E?BɹX";c.C/C'o$׾~OͿ~C=́C[F<q[_lw6={bvN{͑c DyX_'dX fZ] lHӶn·"ܴ{nz<[?La(rČl 1#*TYJVh1ݳ?wLszh_%(7xH f vhʽ嚍| eKq0HKU60)yH9&r )6DtC .ms=^g][&i`[EIJ*t~I@r|{EpD$В\71h:@xEB8ɼ3sN("F8#OD{ yĽsozL;|>mx-Kw7M2xddJ <" s͔`!{`4F%͐Bw?~$)u%Q0Q..!_4l"*HU-XDZqk&FύOJD.1Jzp-ZIy*,S5qC!9)NJ(y/ݡa+>,:9V7dBl}p]NN8/s4uutm}8wggԙ8t[ThE>a6Y;Üu~cؙqU$9cD֙wI_V]t93ʥpB59!CX}\JrN?Qro'1:3ٿjUQI7H ZDZ\!$ p޺|Z5(Y|Jц g0b^޷D^ukN 8*#G=dӨϪ !QG9g6> }(v)=$ޔJԉOnOx>?;?݇}w g`}.bknv|ɭ O_n뇿uG3ցz#oܭw3:a+(6դz;-j~O*PΥWv!}iKB7U6|;PbNX"6[-GHtG[o}HPLJ 'N 2oVx0M#dtZB$Hϝl##:nƽ7JDC)qFY'$5K |h4)sT{t0:V=O2jvS#_8]i}⮿;0W +QÝe8kdelH#؄byn!ˊ4sGLuĴA^9Ӳ^oxt=ڥ釋m֜ i-ӕV`+&vԹY(g0?E#B ZjR3LDIʌцĨ4AQME>@Eۇy/y<8-[dL$.}b"f<3@p&e67 s,PpB8R*/ޙ#dCt[rn5; ?깏,wQ:ZDɕ%V1fUW΅h V3)m1_l- %M[-bSJTOSo549ZdNb ^u .7+m(=@&yK%b>xx+AATP\Xdp*T^kfE4 llJi^ni6m1mmG,|齰n+WOgswEҕ9(ia*!z wcy<ԣRP #c%y4j(fmdf[1B}Ϯ[On.R6}^]_k yǔ(a93DQI +mY/♍*DМΘH ZS` cY3u  qР%WN^-Pu_YG'$ǤřЖR)9"PWz;B$/#P54ɾXiEFDm5yC^2U\8D"|&@\µW6e)xEh]Oi=ϫ<`}"RI!ք@$9y>LK](,Q΋z"'u{Y?+Gh\l`@MhRE0Bh-#< bV_ꏒ#1XEn?J 6 (PT:y!d!T‰g^E58~n"x .3V&n "pMSVY$q(:24[7x7 S\A%=#мWr":9[M,ŧλPFe_2Y/p |Y9Wϧ =Lh w'?fA^]d9rPv9)a*Xik_gtS;t3M %ɬLӎzMr>4dozi="gjgoSp~]^$2E4Tk^#\ T }f5{~;Qo+w7JX76, Zm< B7>yܧa$377?>s|XxTmhcσ_QtDb7\+ٛCѷZUZ5MLŞEek$ Wo;^m5{ҙ\So:^g|[nF 0Aӫ6|𚰝NDME{ۚft֝`> Q"{fgv,#L8"[+aDATZN)Ap$`>$kmI _n:~w9`d9 XIc_5IɴcIԲHbY T˒Y>YTy~QA2lFA\"1\jwJ#5KJB;A . aQC2AGBziUlscZcf9j= f {U=Мq[ၡ9|O7)y.Xnn\VܗU6.׭Vm]8CqߣT_ɼՃ$+bVwo7A׹3W)(9BdسRy.K낷 {5?p^o{t|;A읰?M9n,jTZ*6h<)E;E1єfUu4D{S(sVA-ha[0 ^ИYa.Zz_O[."\OP~F ﱲ<ղBY4zqtʸYkRӗ.u}WLђ"[Ҥ5QzILTreqUvIJ5f|9°IT`)N`G#iϝ Cje\ΜnϦ},|5B(6`y,!fAs 41V`hkSmV;XpiDtvQ,9⊀pMJ2 (oia 8CvYI(J1eelp6 jx7xPKAצ9ty 7ݓnתj񊍨:F{[0'U&ǣܘzRun+R{}(&nVV޼Ǟ`9FwDD_U<\E> n=CQm %ZE@H1 .K%iy#32$a&%&6H*73f*ٸ0ؑ \n-p-'oft}D9RbuW՝_P˧? Čt[JlpHSAAAFT1bZ+2aLuݍP42d%$ڤB:`^e) Ӂi]FsF䂳ڷL̾vٱ+k̬G5صaD!=R(Ca.3ĉ{l% kd (\H☁ !f`E;$p`M@8G;Q10|X M{fk~ʈ$3#GF\{J0c \8&RSd16 sˤlk%g63[PI94(&LR#1sĺܑt}`-l{OldW^y8#DHy0D)F0֖[H1T$3!x/|mvʇ,3!z$Z#7z >uE!n~r+.᣿MXarpo& ~N'na??aehL敩JIarQ!_Yev܃tbge]Fًr+d9`*kIhs}kyeX$kK;ↆuQ?`(_4W~,jq}zVx槢V>n%h/~N9/`6_'%ԛh?C0 |0tUB+t*Tt+5 BCW ^tжJ-G:AG 0fh0t 6JhkOW 5 ҕ$!0,Wv OW %#] ])U+̅ ]\P*7J>HW2Ā*Vá+`\ 1 ]ɎC/}d "t\ƏKWۡG]mRL[Еjס IDW26Jp3ZMNWy|@&WaѐBU*nq]Ļ=pTSʼn|S6y69r\0&c|zsh׏ߴӻ^'C~Ͼ^pϋb~k{-D9E3e=!}O?:V0#*)dFRK6;˜od['[Ƒ=Ȯ k/9v֖R2|%gS4a5mw=yq9<J|X2ti0<%V(ÒQah}iuI_Zk0Rƚy`7]nФA%XW~|2=ʑhktiӊ*fհ:Yח܆7`\uvZe]Ef]V.H=VP4!)zr{m^\+MuzP}E1ܼ!g/Y#m.g`Oh+:>OqL/<+䞧IꪕB }X||HpƲo?; bycM-A;%, =\Hzq}W<|@~sJNL NnZF_L('_T!m$r8! 
&C+@Kqwo1$J  s= BZ.NW COH0fb0t$'.J=;I\aqMOim)h%FV Yi&]VrR|7fblWQeM(&LRc$Nj[nWWZVbltw鏽TIH?gBiF)iM_~|~޵#ֻdzeޥ˗N7'7˞y A?z UZ#/ ?_]qmGo>Wy1-g|M >] s%\ EKh%Z bZKJ7a)"tK{Wxøo[:V=UTiZI@q1KVMCXk X;OɝTC (`RVhTdtփOjM񟯺Sdr;2{\>k;/;ojq)փ:4[,9wG˳ڊW{ Rzw)uW=!g^1\3MVD*_YܺGu塛]՛r͎ٻ]݂[/WIZ7ٚۡ럑PZ OJTdqN9ޝl|E/Zj=tQNě簦.{(&C;HPS ^cѥ;8;oLJZXJGge-V1Є3MsI&]I =@X:(lӁrQRC.{ #rM䢖NvN9]ݻ`fbw:W$>@®#, *R`X6鮕bvjW]I*{ `5>rpVpb0t0TGU PqC$JjEА6D`1Jp ]ZD*$#]"]i IU;W{B+zo &j|>t;>GSv@3rlWvpّ]ۡU8tnk/JoAWz]]``*%r(t+Iv(`B]`z$=vӶ?]%ND;Yy; , :{)tCW -VPr< 4^thVUBIHWHWJu/*5U^̵/^cC]7:CR(XW M3o뫷o7|i xÿ~A>fdF}>4ף|>Jti0Ɉ9}cYuj-49>j8zp~/qヽD qC%6\ %a+%}{$R),KctF4y30_ny=pJC+f趂+S+LiQ* #oW&W7+MtMmGtqE~COcɋ{;PjT;,wE< =1ǧwW{ ִ\A׸L-CǕ鄫#ĕ7v#=[opx󈹤1nڧ,oUěɕLPI.0}yq &7ĭ>Ѿ&{ %pr[ێ2~3LTʩ>F\(hC`Oq32^+S+S WG-]b`&7 j;+2pupep٥ԆwW䮎Wb- eCWMnWP+Sɧ+LjH7+fperoWSe>σ+s'qY{ʕhOinOer7-:$mW&8fpernWVW2W$ݰ7Ȅ1<{|WnIcOZ6[4F>xLJAxiqy p?CO.^#u@],v??{oxM,~!?/Oho~-gsOhܻr~;BW׻]<;եg§{|^~?L?zq{Մ>*j.[P8R__3Y7١ #c^$T1- 0@xX牻ϙ)̏B0^7g~.ںO~w/ܝA*>5Y:% DRSpuP Z3iԨƯT>RW;W<~wjߍ~*LwW_ ŻWoG\sfꙇ*j.1.SST\ L1{cVQ7AJT&\ЅTZtG)vz3NsƖ2&774VKqI${hB0sf_ޥ2B :ꝣIl)AlguD狧Z' qi$60ИU1>lyJaIhsa1-B]dke[rSEGhGZGH}=f~(\YRQZF/9)֥@5HڻX/WBO L@ϮX'JV͋-]Їti8)M4 s#[J1TI"J_`],9)#;:D{|J.D `eohZ$V# bY*rs0س{ku8̡"i(}"{2: "[[ (f5Nی=0J b7(cdC2a E8))T ׮HQ*we&=tX ,0ua'ەC$T3r3.vȀ3F/ۨ@ CZSl@eD&\tɔ&w*jG.Ŝu,U`{<`a.&bֲ%8TR =IAFnL!OJ/0-Y2: @]TtgQ"HqbQ WYvxgD)J6􅲎>()WCUW"8]DW Qv`Jѧz*DYf^ZO?˕K@Ȟh#%Tk-Ȳ0VCAO U+cSM"hʲAIb# ;;Sw+ub,@T<5vND(c}o}BOugⅳvݙ<+o_c-x fHacK .` ja&aKp1åБJ ҵ̰֭b|LGɣ .!'Ȳ((xU(+"%Fy0z-Dm(2:#YAk<2]^˂1ZvgxT)*64x$; tXTup MȈU̻{doQe>enwj?;ElÕ(Z,&XN[: #]rȞd9(P"Q9inJB-|I:1\?kqEtepExxOfXan[RP2v 8gޱ#,uPt鍈+-6nwI,iW DX0$=( /p*J5!ĝc,#tVL/L9;QZSbE&"}G 7[Ģ S4]q(+!*BPʉ{֤ޢ+0=nKvt՞Ew4 FTfQ;Ov魻Ռ*B\ korwv2A G@>;>-h !مG :T !t3׺ n_o2Wo5sU&%ڃ{`ԣ'[ч4 Ņ6 lb7n;ZHu:tkiޮg+( !e"h^PQ\Aʋ<1ߴȳIj7LJȀ# b/vdsC ȘVOo@}a2l0}DI$L\{K$%@I .A꩔f\\aBW M ]]"]`]U,Np>t%gє& mb$`+!+y˻錇yRp,q(T2Peќ0Ǘ[jNTh+9|5-4 hmiICF@[DW8 mVUBY9rJ(AhXUZ3gjtJ( ;~tֳi_z B>R0-K !A5_*qtBo< .WB} 'Ӟ\/?e(LII!x/޼\2Y~rдWE1- (yye%fUN, R6o͙f'% q} WE QxLw,rBWtM>;3Jk/JDU}r}cyV2IȬ"0'MXuTLeYDYȄ'6enSX`%,hZ>Wՙ*s@g9!sGNuZ*VIym]l׶ <τ:.Y3!Ti1E.6Zb'5.6Ռ6N(9^4RJp![CW lFBU*r!]]]!lSxHRXW .'mV4~#Trteb0 IpekwIhKBiҕ5D6I& IpikV4JtЕس10< =3x\sn: -硫PJ,Е@:fܴe5t Jh5i:]%"]] ]1j5MU3CY7mdҮ1ͷ*P(ƈ4g_= j M'}9M'MS"M Ms˙-+%m ]%ʴZcNWRtut%bMt[UBo]%.MǥklQ]%5K 8Ci.4{\ BNW %eHWHWр-"JpO=T]ZU*HWHWhfo$WɶUBkOWRU/%9NWtu\Jμ2xZviCY m]J"][4Y[DW4mdZCW ݇tP*tutŨbZҺ5tȦUBIѺHb PeU TX5rsj$-i2\B -M鄲ifHӯBӂ1%dU`"'8孡VUBٴ͐^4nh2*~!㔫7#.,ǃL~AIlQD5ĮZ[l95r+:?+Q5ZB5Sl\<ᒺ^eK)V/Y{=8Svnye >DsCNn?P+le䌭^vFnsy2~@3mI |xdga_CYzy.?_Q:E]=Isozә J:X4n+$s{Wj@Mkrw~.3ԻO$40}ﮇ~݉+A;HsrTHs1z"`,PG G-sk"&/ VT\nUyJ0?|*0~Ƿ\ 40OE.-zߧXnbeLY?#saZ 7qЙBd}$^gi% ³?Jy24~T? 
KUfW}]tˑxqm20xO֫ೃ4/¶}7)mjsM޺*;7 z=rGO?Oz"s^3GU~f:-'Sv}o5/4ޮҤRo߬Z(J-[ȱ:&Qg8ng1{'{soO24Wvj>haO,v:f:Mה+hj!%3p8~K$e4Oqv;_$ r-8 %,K:vJsgz}櫱dL:--I,emiUƫ?XH3CDt<9si-Zz~=ޞ:Ew@f'08du2q/À7$N/ FSPB#'γ9'{ :* NݺaB+jQpC:$АaGs&)+79"Y[3yd[m0/MhHTGvybG>2#ºѳlLfndc5fA2ŊvgLا?e]f7v^gdT\;4;Ɯay58ÔrB-}#CGgyk kS(M ;dwC2zֹYbܛ -7go(X+VX&Z%X+1 ֺ`-LI-|m]{>+8-uRѰo{j(r*׳\.k=q~˖ TKSI5d q9QE-"qRLȌy $7Giv<yPJpB"EV5q[Bv7;NƎh^hM ȤxS~0Seh߶E>e ſnc ?E?_6gz̔=NqL'Ы!-dAQ,yRw| r7'p/a;-Jsd3y9]I `4-`OHxbH~H|^) ҄:"4LL,w;^t/,8;Rq:f/pvٙszq.޵izs(DrZõ\D҇Bi"`,K-哆E/*/x檆4/*sE[3S)mG;YQj(jk,a K\Wh1Qx4PG,p$ZH/@_|&q:Ng6֦ɞ2'Q)ΚsM*jGR "籲 ҸmUI?r@|Ϥ6T5=\ڐt~Q$Dg ur-ОA!i-^6H}; (, 95x/]$L(,su9х 2o/p,絚pj7ҙF*D 3̓pBpJFbPĭ >A%/7ɵكTg}z{OR 0OvMAaH nѺRwl1Z >k h^^ЉfiVIO+n":y@+7iw9'q/"Mc1wX+nuSǥo.zכhgR0I 0XUۀIYjG0iI5v7#gK̰Ԧ{/O܈&^Y6 8^HPv\n+^Ύ,&"Y dHcdCɪj[ !zo!8!1 hŦ~7x?M;2lMLDKTdFG*dAyUS::ˆDdς *h sgϓ<ܳ_Xadѱ8Z撲ˑ zcJF[J1(K^&=jni^,Rx5=\J`OXLpT;iFۛѱ$~hǹ-;yF0 4~P?A>imȧ2!A yp&ޑA~:|+]Z3c (ɭ+^q|#'$<\T }?jt{%ѿZ:eQ@ +YKQĭ)hػ IhK'yw異Gi~hv{u񯽋+ \^?xu0`4Kb>Ɠr2~w<[[tk{92N~ί{'/frnLJ?ӇUȕWNsYްЦ<9`]źNN =sa|ጆJ~CuZ!riu煌ԉ5_NcrGeT!tu䟟*՚A OfGq|v'VGo{7߼x޼i߾Wo߼w? \X$=?||2__SSyhS+hf^Wy{0;- ğG?\$};ƚwHO^ Qne핻l쪸#{plh6ŌKFvhLl2kTBsΆz}{[l!1 B񙢒>bV"Q]Wd3&g:{ی'v6w>N?};wcV a;Vp<^BF 7{LiQ^>"ob?}mf.<+Zq]$=Jz`)3:(~_E|yq4^ynt}P7י}}:ݵ=[y זԈF?>e?_x`WGget|xѫʙ^BF^-C׃,{x0bn]^/I=;?8Gs?_sX9:fOvN" 4 SDZP"GakKr^V8RC.<0qd k?a,2ӌWMQ#9mE!sm(gEZas 0CpX"RrTK_w4ql3rVJA bC(& 5L !3"mMv۶ZZ,=$ʪ6P4*HDעHr\Dnyxڊ;N /Zw쪵ֆAka/IGe%t.@@"p"Z*޲0NK"8r@H>,ڐjپlb1뚔M,m4l85'|HE'Tg>lFf}؃E稪E#6]5lٮ1,ai,&5 I <>HWe23%IE1w0L+)Y )I !a12(Cl{Ќ̯:^.z^A/zq a?HUXX)pA`-aHc( W4XJD^| /wUc}vӇ64z5ȭ1F]-`g VVYxǏgE#"zI+_TNDI!{W2{{0ǝhi-H%cV҂2:-WaJ{iiX?Il0a{ϸ?!FtǬ|0|~v65kI}qWChuvzdW,[Oww|wnvy9' .Pz|֯f? î[OFyT-Fqz~8?=]3*Sϼ orKhcu;Ϗ; t LRYt$ZJ# T3 A5{T׺3, d b ȄbHlqx?Wވ׶01 mT.*6-Bʞ#9TlDtFz%)n}|h} ܇gs">}ayY͸}S//g8`#ߎ'.DeoU(TcZK)AGǶ9;=LT ޙVq*_l##WBPɴ%ףF Thx܌|q)<'k_| !B:ոb@τ,"+#ӏ\k/sqwv HDl*rZh*N%:!w8pKHR~cK- [b-w=2UYX3a4RkF*H6I ZDÚ Y +ǖvǖg==|g+->NlV͗R{>$L"j@2r:2ʵ )B.ޣIgPHH41%ETᔕ  ^z$x>n֌B=VW U:nؔ &W6x&Wd#L>v`/ZtU6fn[L)i2=_H 9mߝcJ>]= V9mu]RF A>'ɧٻOڣOڏIU&.m\ F9XEBd%weID2B`I;tm5twKC%gZh;j§6@{GщYo2[V=`b>IfF&& {*ڱaɖeWO;oddϔf3]ny4ޅJe#J&;j!Z^ E*5O)Cc@8V j#2%0مlII2+@A4٢-=2h˸䕶Q@~y@uSB"E5  ?#%_}B EiPm&CO9;$f@h7H.A&,I/o@#ءd~0=Sٸٗ#$ `F`UaHB%QmL֘Qx9[Y/Wb3uʗ% Z2#U:"!lL$o-IǺ6gtDo Xs*h)޹Fr+4|Hd_ b;M|]bk=U=ͮ44KMXkgz4"d}x)!-<=VB("oA5QT<|[tjO=4ɿiT9:XU]_Cq㷴0%xc[K|@6ہBL۴-gkJ[b֝Gaq_DjH96ݪFZ Uբ`5ݕ*Kz9΁o6(UUuڀs=M;)Tз4U]_8p[nM/|vk*p PZlJ_mCڶΥ-Yq f) |\{F_P|w|MgT\ЂW洠XFwM~nڍf6TXzfۻ{Ҷ,!QZ*m\M^]74YQ<5Qm3{è6u%lՙRA}m:DMt؅?Oi5u!kUҥj=blƺ S2eDBJi;Dr}-3sy |p_p_!q||l~ڣjׯ1wF̡-:E 1i|F hjqŽ=@dRZM**[Ygj4oC6l@PCH:x~o^pVmciPW.7l]qOi-<Ȕ֡!غ)w.(Gbu UliMwA0*,RRRR>=g1V6Wo)*3eUIsUԱ uYgaUo*GظG¨FbrV*~a}Q.yWuyēCtTUYUu0DPM۪zNsjq[JB-Ewte/Pq`I%wiR/A{ZۀMJ/6.56XjZݺ֚)j%U `weE6eg*/6(wnW*zkU,cTEJj<,+B`!R]q8z_:$"6PK<ñ_p[%i}"? 
Á @V4^2q H˴hR˔i;銁+ A:2)˺z1r[;U] F3"a`0Z ji Е˺ڵ5 ]iN:)b]Qͺ6>  g~*>]7ʳB뵱~n~緋833X({fLAϊ)~ׯ ] *gLgb3d]MPWѡ&2p9͢J 2m(YW/FW~ˢyƟ;80\;a~a]YW*z+2(FW NuŔEWYWGѕ1F#;_Fi#!h\i)&Zk Jki {9` b:D*)ͺЃ w>T}eS]ɮJ_6t曼EIQTV?ݚu99 4g>%zs^#dDSw2t2;ľ}tb hP~_C-HǼ*n9-a,bE}zfğo^t:ݬla]ìRUUUhpz-v59k\_t{US UMrUcrcfPͰ˦c5A{[{KYJvS\rluU:Ӓ@5-Lih{rйwIJ,mrsA"ۚվFЮ k>Uki;C[Vt^%HGWeE:k_PVBmotIuak7'fZl8g;I[}ͯowquuoe_UF˗J/ǿ:˗}}2ћvUרvkfTwYp9@ꮃzsN~kzUoأSNt0nD);Dkzͣ;Sݱ!jA F3 )IE+6 gF3#i+YWSԕG lAY)bڐ~YWS5ߝJWJ֩uŔ^–ENR֏?>S {ka8ja  YWV{-HWFO; h)bڱƮQhQteEAb`/GW]U*u]1YWSԕ1јSaAL< (q * Ҡ(ҌkLmfh'hipꊁGd6 [)bژ|(Q>ues IW ]Q֫RǬ h cpbtE։aCvDU NQW JG9<$Rt ( 9%EWFwhmcWLج*Z z2HA^ʄeSƅJ8av䱫a~akI*U̺ڵ58H`#AQ)bZgRS)1;;VB34g`N&:8=vyC ҴAi1Q%ӎ}e$5 A81bhQ' fJTYWRw%18zRtŴϽ`x]1%)ʃ_!&<`N&%M&-۠3X_z&eQrRD )YTsJ4ʥv&!4~lمXj b3b،.6ѢRwR.NS銁#0+5|u)Tp0r+%EWL!u]1 YWU0Nx72-g1F1"\-0GWSU4!z#HWT+1٘&F)]ѕٲͩ]WM; qlhя4v5ҭ&+3@W&jע`тt.dpm(YWԕ1r'EBkNpjzqI׀ 4iƵJ5͔>fMOPӀ:+ EWF)RNu>FA"V]1K^WL1j􈁀]1.8)bZRSzu5A]98ArVi-(/EWLkAu孉ARg[Pq 3K(SFӏg *Q1ޭf8oA&`#i5QBfԒ*gMEӠ<+µJӦ f&pd]=߹!v|L1?UϘL4ͼ+ ˻߼yY:E*_nԾZUA{o)~WZΦc鐾7z+/.(~.$*oݔۻ,ZwuVAxwe}~ۆo7[|s!0Oqp> Q^7>hpxG-v%b~6EⲤ[ akw7)Z(fHlݫv9yJz]>fΖ?n Gڋc0P7p){3J%=|K(JΚ"L鬜%QK|r}VB2%ұ[R SsV妬Wn"R͚aM\?Ov:MvbN+ hX_[Їj(&B@|{MP*Ɯ bfRZH~I;*DKԩUU,FD] 0MQr9C1~-6ڔ,}j!WWTs4sc}zonf%:[)wFJؓ(-rϭߣ>y͌e 9\ scK5c}+TUiԒϏ=G*Z)Y]mfȺM(Kc 7 bʒ&PdKX*0οϐ?!KXh*gs#Q?ΐwUa}pyĨt ɣ A>2.DIگ˾y}kU 1Wm^u*L%;KT}3D> RtޜyWUT/i>s<:ZIͺaհ)=Hn$]QcNqI>I5j'-ؓ9fqu4~1`یU9 ҙܒ`/DmT/ *Z}ƥ/-@͂$.FR-+搌wr26z=򰡥vnYB U|n` &`љJb=w >B.聾0Q9_|&THdFG>rI zSNcG3PQCm>+h-aC>83 AL7ZuS  ǖQ[Ck+.L-a7tGV- YZeco; VlztF)?{AE F@n7((ʡ7fcO $,XY_tawTL:TjMPPg"52\7T r6ЙlC@Wf7M-VTLgqJAQŁ.єfՠ-:;J` l濵BR! ƺ8gYƸU]%׽6SGAyǒj0 !1T(`5Zѡp`Yl,@'@Hľ=(aL! ∽}Ay!}p#eP/,ԥj*A Bc̠JUYa;i #NHe]L<0ճM_I-|yoTUz[&!jhX ; b{PT8xiT Ne&Um jmlAhLkuLΣ'y.|Ѣ`a {=dHiD0iyM.}b1; ;]Kk,30}dH֨Vx4 ;< cx.puaQ% 9ա5PwNu2uVH 355KA2ZuB`5_!]tf15MVѠ8Yx`m@ [SW\Dc,e"En$l& -hx5.A r7ǭ ]el:X}7=[t<y>i/Os=;,6P`0u0 gllfѳq5e'BsgUCŪcԮcZs5|5Ǥͨz; ;x .dM2~  [Wϟ}hY of=Al z1m?G7yշ|6O|')vVZ.ANVC5zr_LoN7oo O7?sl^_<{M8VW$g W/gڜ]~x{;xzT:6Ĉ_N/|v1nzYm0H<0C(?%*>$:|9t0I u';I=$q#I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$MQ> p@g& }(|ٓLyC$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@zI 48t@ }R`@@kg%I=$P4)IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$n5B@$؃Ie@@i3b%$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I IIH@$$$ $I II$>8Zgָ`:z~GMyq7!eQ\"&#؃K@i|ѓ.$njsznvWm ^s;G!˫vڵ-U೏Gy|fNG3ئobrّ{XpӡislE9墱dWcnЇbmtb3b?A,ݻD9qJp}E`tDNLozڬ//Fe}v_C8o|-ۣUjߍ_{v9_l޷GwIoy ~_R7 j}ueo~d.f@o7d=dHw<_&r%?Vh[- /[~G[0ӎN7\L%g;g T\^?zǚ];k{ߚ/W_N;yʭf[jfpXyyG0aXc_!ORAyK|@;pF;]1J ]EmuDWl~~]uBWNW˥HWF.]1t8f0iJ>UBW4tE\z:Vc9]}`O1]}_IW6Fgy? uQ}].6DW 8ƃ+ ]1J]}Jk ]cӯz)te )gBW6>zu(Szttxogۿ[$Ⱦ'~;:[mv73r'Ξ(TVGf wJRƺdy۟=nk|0*p4C 7 ڨ=M3gZx۸_EGa7-4Y,M`Y%U'΢}gF/[#1x מᐇ9C,aQ*9 Ҝ \%qٰ$$ \=CT"@` N>j)oԣY^L@Gbalߠan5xQX߇Ay-mYӃfr5ϧ3vRٕץU)~>q!볻1|sN7fԁ6(/w^c52v3H&xa(Vz~^hܽ ,N?펍jz=O5icژ6T}n{x@:oDVğ˜$0Йsc)H z՟@^&[ ? 
?(XM/OC'3H>Kwh{Eo_cy>\৴rUNݍ&ҿs?G/ww|$pm*`4"t*JT)>yCrhieG'ʎkBh(JUBačIK\,Z5 =jFĽwq;Z9/){8R7j8IBPiFҲ5GЭϝ(ѴHTNmySP 5ϭ`iK'h RS3JJGkV*7,c34WH<>D+R"j7Ө"<I0gj)+0Asf9D%yBj#δx8SDPGנEl)` d1@,F&2RAP\WiJ$CqM@H Zd&,̌SgWt:ʨ2< @O"hxADAgZJ}c$F}+fh $M 9AMMG$" gI=+wpR`whAuu=!#au@jZ0ՠJ% #GCPH2k1|c疘E%*ӂHVد[o sܮYSKr%>CVgBjz>}uM|vŁ[A7A'sU/^lu'h!xݗt#a[զ.nQˏѝ>~.p` 2ŕyBqa 2;RX<Kv)G/T"!cu.~\UaY |Ù@@.}7pN9w%R{ KWc"WTV%ǒ A8"91ڐ XԢYWXXʈb qi6(]& `0$`&8g7`K:I54VaNXjZYuB7vF5c.6j]6utr˄O,,C5nD\5.w'Eldd!َI܈fM&0W Q9h{gĮ0Rjk󣵼<{~(0t : XZaqsɞdRRK%k =^&Y&Y&Y&mah='^:y&]o6xE8Cɑ IxyFUck7_[v U}Sz?<4XE+Q4ySwLԽTT@vAkm`ל=TCVB=+!1p7s'}/%Λge=0O]Ƀϧ7,s6Of/xwt:ζ/ul3Onfӯ+NNK,0_:bF W]ou%|]C=憛W)6]%ͳ:%?{<Lz:A"a왔]vuuoBshq% // ͳH#GO0<%W/g&vīogqzױY<24>[>ߞ]|~t[ZBVu}Ooc|X-E /CFZ.780DJDr}}7e q%]lq~59CF nPH {uX:""nj9v*U/;@s!H_3[+7 {yoԻL՝ڼBHrPM;鼲e;Ku#xũ5ԁ,?mX֘K8r SS۱4Y,<7󫬖?.^]M?wŇGّhl3ΔKC!`TŒ_r]9xb x1pG΀J(9sNtnܷlxagc/Y \g'@9ֻp L[/f&XI@@H)6M)őxd8nxVB7:BٺF/٪ql|+޳es'x$WK졈{~ ʕ|ΗT-D^^ B_/be~/xS~B]+9\l{}\4zp?Ǝf(j/L첪fzxzw^4*\ُAݢnُQ^v'\6Hp7u!i+ |WLBȁއHzztvn7Kjjソ˷(! 66ECCdOGSI*dS79o~ ΄|Th,=dN~iiɓnCh]1؞"ԭN='nkNID<_a77gR!Ͻ|eޑ}{^|ܫȷSߪ/~g JM%%b,4]={@l*I{WS &"TݏJze꾼F}@t6U5Ty$-BTCoe+P3h.{m*v <[k!8>9.qL~NsntvWw]8YG7 bϣӫY-lCS?ǂQ`F4A}OC ^~Rr+ame$Kњpb [I;g"W|,0f(4@> 4p^-0&v= K}=4g.[# ^]JF#/gd=O2O2O2Oڬ<ڲԀM1 84)低srd#dMH5 :4I4#{k7[w>5?(і%mNH^. R-4ɏF_NrV37P^">qn!9k(&Gp>$B%) YrJ~ERBɴ_nϼC%g6&މ("2mz  S=>hwg0rAyu}=wTշՁ [PA;؉0-~|fe`:PK™:Ҏs;{۲i>۪w/'/'/'/'mlLGt2RBBes&ZYʏZXVQϜnT3)w`a12i[tTeE΍ Mb&i .`Bx9<$7tB2ai(OE{ԠPQX*,Q? wd_Ux/?o%$gq@~"/G\ڥfѯ|L dnﶭULKn_]=n^.*¢ $ZKZ*f9%1ry9E\+|n׭i̩K% |z4OTy,A3DdJj3h $w9,\<01)a_>M5}N(|sQa=1yNvֆ|էQcHJf}t̓˙ 2sۮ`w燢定?v||ٜ [*WMyLk+;32XXrLp4?;!OG:72BQyM~caŤUFRBhՀ>+эͮ>my7G-`t_}TƪWA(ԪU'O@ )p׉̃r! )..\Pś8))7֍P CHe,)0ŹuRZ7`%Vd)w뺈#2 c`9%.P]o=s]䷗;}:8hEnL"Ephx!ƒ,=!QAA JdJ{+%jvYQ\z-xkC+Ԓm JߗNmCmr?r?r?r?i};fwDWѦ;q XŤm\hlVGVjݨ*VBVcd#?L=dnxw b$ů[֮e~w,OG?;]G9j]Dpg#\l-؎C<<B؟2Zz.|(Zw(T#"2E?ؘU16I›!%'MH\ޥ& cd$y2fw2=ks۸E/mg*9-1g{ =0~~uzr;9oͷEtHt)%B͒* X,ʐ,7l x'8 & Oڑ"fss [LJئP8,b)?cBH!z$Y'sJ°'J&^<$!I!I䏥e%l-ш#K.fF;Q:9X^„ؗgDuх%_>2W=|@ Mܤ::f?z~q|2l6bOoߜߜ'Q]Q̟fdhi+KC|HE.v[M./Z_mz@ʏ̎>9{DB~w dJnn:KWƊ{K]lf>].߁ϖw{~~W&gF[G\&VaqU~u9z)A߆~>rX[K !I;5'xJx1(^vt FF#?~+zAvLB6%ScY$w W?&uClVuԉ:05Z/ycK0Nl<`N`D%8.#K+WgY2,z_Xc(~Z)XUe2^xÄΈ (/ ?.!Gi1NEAkRXJBnGw@đnd:N輹e8c (\gT3!Nlb~;i53zAi3씪b𒌛$ֶnjtX ae;xQw\J Hn8 m Mߜ^^ԻpBa%NYDxŒFaȄ,-,w4,YbJLhj7ir4n̚15ǣ,6v ͤJl` u$xIꃅ4l<9p#h&8iQ7uzqң9M-dmٱ')JW V乙cy{ +;q4- )k]H39%@d^yP4ˏ$78y\Jb eEp'dTZ4ȏSd3 (x=և-UT*pNʢ,*2J(l%W{idBJC0_?槃nKIUb#zjbG4TM|L:d'Ϻc] A^]x(I\3xִ֮lՔkW{JZlʣx~+oLH N⹙ ӠःHVҋKu m/C ;F ȅ|IHBhP}% 6Vn](iU+efH{!D"S ^ Iӊr="…VPRdDirWjʡ %{ީP0AӗZ/afU C (=9Ggh$I'(յ2N[v+>c3ggMm> *n7$ 4ȘԌIcpB#Ia4XSsEmT\1NHk*h7iD{Ҳ{[ܳo˵vJ~Mޜ%rEXB:7W~AG>hQemnP{ Vʆjiz[~} +M_/Sl({+]@Ɲ38;V9qHrrꉁ+»*R OS L24`>` Fm?cD&7 ZkN$01!]/zTyrO vۑ(alۀX B]TmBZj P69 8g-QFoJCc#iKrx#PwFeۥsH3Pp2J) Dw+eQBʹ .Q1ws+e 9ⲿ')g)&o`(I& < Zp?_0$;&)kRӫ^4m*)}U3^WtCȯrޡhlUNZ#_>3:5la"NiMrW] 8=NaKYIm # F Qg.7%PLZ/t6cQL$cЇ~Q.&LN>!S֘jQ}^obUuw_ΘGPN1&̮wo>-׷''RdJR&2Np<9=ţV˷+~8IR|Y?=?g5)n>RK̯yq+ LX1O&?=m6<؁q _))/~s49Ƥh_8߅ۻ2 lq,1}m&I0OR%$l֓ J#GmtYPeʠ x Yܝ`ލVËUխloĴtq.qMnn7vTW$6MCsgsP KsS5s9*F*٥;I[R[&)G͊U¤ǢI.k=83E[-lU+Uu t_U `#W=půk JӘh1Q˫m _{6]&ހ {%}YQEi*uY |aɊCa@湷'6:@++ـM_zeEZd -5Z$O4HZ$@R*%Gږ*V9r eOTbH%K7AgmDReKԜkadsgBQ%\Z uɄɋ*+]8HԴfc1JsbԛI*iq~9VLL&(Bi&4yǗSh1PddE- &n4lKl xi)V3z`ȏ;F4|t#$jl4vнMU_4{4]}AW}7$h4zkGrkՓp;vQ (-hrw2pr~Oj%*fg+;I[$uoG GE]{.O꿍F#@u cD7ǪT2eQ d]U9VL:(cg3#D1_Foj? 
&]Ѝζ$igF+=:mfgK]vkʾAIu$/i Ł*/cɊJkPARTIYJ᫜URU$0%h Ɗ@wHmv{OgiX"=/躷t7 Q7d b\jKgD [:;TѭHm\PSkzfvL@dru^9)hfGXNuyK*@(/t9WJsE쫹@-x#p-c[xRzʒWnZ9\F NMg8j-\wBKr|)fͩÝ8̦Ux.{VcP`k(P߬,>H3{Kh߳Rx'6E&$Nl:yj̶Mit'q,]FWmԺr.J xD&O-9|(gF/ٺ>&>LwVI݈l[% SYnh0"(C>0l[ipִxXcR7h:n8c qm?(=1Sn}Fի,  V(*/9io%ȃ4(@Ʊ\:J^Au 7!eY6F>/ErYIr a\{@A}+SW hLMMޭ7HTZAX[ ^)dvs@1Yv<>J 3~I%`#dbD}[բJd({5ޗiv1j;tdfK;aK7dUKZջ$]TуtU@UY̖WO` Re֍D9qus +$ #Mt)FLj&գĈAFwG#i0DܹD# P:ND/nؒ4$h8l4K/=tx;nwfLwz%EUe)eGR,Ǎ{JlY+$FݾN׌nDC~EBYk]k4աgmԎ&5<,&'LҴ]Ϛ,u@ZOYےEZrmn:}mTR3o Uw^/Mǯi)-͇kPK< F=(otc_x:[*bHՂcNJw릖f_Z81w:9lvOx-KTn=WE;)E9o馸_tuN1Hi_ȩ`4tބtc-iOUE5rcZd&>={f%xl8]N.Vra70,٤USC@{ZO)|Ӯ_KdPR}URXqH H~rZ&tw"ES' hh=4 !jA7kpC/ }cMC25n7sxJ#ؑ E!G=ga᩽\2Np)$ޯ4+38_$yN0.vڣ3\J=H{`|ZSX姀g_Ii)%T<"Dx,!GQʵ:sZ@v7:F.Lt^9"mzAf^)X᩻槨DqTPV7? X տt%ˊ-*6,.$(EC߀͡GRtZuA#$]L&OSmUgce*\lFV @ .Wy<{Դb+R)}=_Kh2'z9wCaKcpA]{pFo#_QɁ1.@ ~dmAk̯ٙg_!BY6ZcJaON @3]J ~,Qch@B+@R.2X4xY1NR%6_ 3TzapwADKI%aAAȦd3ovN+\VkDFbưxn$92ƎV\:s8rTheb:3ڰd-D{jnT7 Y>O^$ ja .>-5 n4c<̍/ngeo/\4*'f9DAT^<=ϻpb,^L WRUО"r~4/?qۧUB #i,Kt!C^L &#C1a8 ''I^\q> QL:`CEv})W`s'-`ū'գjZѭwEkEW{p\lD{\kV K뾳'Ub!RBr4(rqs@dYND H(kT)r3rd8hX6_mDCrg0:aA{|J|lI"rKth ,L#cmc#3 ~zbcLTTAF-'P~рd'|0$DO1yT)Rrd"q)T 9~ŕO7إWw ($&I#IlbSdr?2=x;Ǝ9.%̠ᨩhzZLQ*>'209LQy;$x;Ǝɘ%̠) }^er;)j-"j)USft@]v*LX%p0:{PTc(Fzs^V*SP2pgnlƢctZ#Cj _ g/lv +Z| 4q&'47⩱R4n}' FY{`1ZݨrHɄA-QV*NhE؃SX$6wM`wMU[444I#E$&eeVr.4${"%.aώKKS/PnxHgc3{4bM k !罭O˽]vقRj ԓ h0a D+gY +RWNEBTjAᕆd<;L;؎#yF:{B`#z-?a;çM4J%N Pg NQ\W*1vs(3 jBN_8@.:h4Oomh|#B`CFx8׸i799?AFot=.J:?䈲]XhDsL5YJ._'Lobe<,SGC롩X}lR&Ts9DtMH{/w3 Bs; 8llE1~n$>5tFݽy}y4Q/ 'W9% 92ƎƎuƯ:bp2[- *[- 9ŝv:Ờ^$h+shs\\"V*\f.ݎmx kLR׈f-NHtӠS GD&nMM;XKS 9M)4/+ h KͲɩ0ro=CCUX"!2.<Qe2z㒹மHISPJ.lFl藡Pb a -naB0,n2 AХ|x$]Қd"vy}_ԤPses\C3Ed,Û7 W y$M\eM൓HxӅN LռBZ#cvm}R7fJkyC3u (ǿ:6C=1`87Y^}"~xɎxxDJ2^-]1,c%N \@'6;7:JFۄ5)sH%&? 6CD}x*I*%#k~y SOßY|.ZԒ/۟&y{l$ *[7Q\[)!K쬲;"30f&'{JY.dp:8J%cuk|[YYhVJozsU%-c%\߮gUO]bq(<4E׻Zkxͤ19 șjceVBd`g\zv?˔J_OrƧ8][@ӥ0lǞtG*VXF.3( 8hQd bؠV,9jthͶwD#KAj \"sϙb CoBiAf,YLIH˭!f%^x>uoݰV2~PDᗿ?elvm~?g^y'HXNG脑-2㹑g~/7vI&xgVO~J $+c2Ya.j@^)4eԥt<HUdx4g K]%Nt2,ңo LL: K9X&62O) ,Uihr!>ؐ姩-o@ aby=\,/Vk !y^ c6V<|a֪+:٨KD@ 4gI2[P;>iu}mխ\)t#P[庺TgH=GVa-uL!O:*\NࢉD;ZZƨ4!AI-2;*T y"Mb!qRF[ euW6q(&Mxw8w < #*pX9ʹLVn0iG{:β }ʉ)0 R-HIɩ 8I^bO'QjYy]խ!ZJo됎3CP@:Jr8! Ap&DLĨ18 #4Đ㣰؇@h \DMi+u^=2*x#`R<̵,􏗇Kdϰb"yH2?o'S3sE?Q(?Ch+MI:D/aQ0(N 3BGn &lf -h|ָF?z7~[8\4',8oz56Z:"[x˃Խ7 Rf8w`pxvݭ?L,2|j?^0/!;ߏta8`$=flA;x05-to7p>?6'p`֦E3^nvv~5B#W9}zk̮^vλ;]Mo {yy{x|Ujga)F]\wznM&UF&oЧNpOwmS43y 5*1tq:GM7tzz RHKw$ M V$et?%LOSStByA/ 0~;oga+?=9ī!=_0­[Gg6jv|Ԛ~=p a!yޭ \nC~4]>;wį7pĝⷬ70gF^|;G ;wI _+&/ͩ^w m} s;ۯ_m9v?T$ƴmgOLDGS's=C=Gΰ ,D펜Mu:槃]Aw0vOp^4}z=S-u4~gyz|'o@>"귫m)3 .z}h&?;ƖI6ҕ^?͔t\s2ognOZ` 0E5pOhq 0xug%@ lFmMBsNurLO.5}/KYY=I i#w@N4ŒC5;p'Ýb"1 0iXCS QH6cj #Y]AY] ]#H#zFRL#) nP-׳1xH"Sjˀ"NLRSXpMcY< L+NO4f.3*VƉlҏgt*C!e6S6ÿfPiLK2%2xY"qq\&spIULIZ~ϯ}u||k1ɴSW'r{ XL;]UIr=<8JOY/gOwBϜ?jcu aqp6^:0wpOsN8Co3}wKz]Tm7j ߦ~$N<0;dݼ||||VD5R+u;B)!. 
hɜI,M2gi9+N2 ޻y3>_FaGG*!.BzRK =0og@D$0|daN0T& MЄ&|鸡   IDW2CH)4E2#|$XcN»K0=,h::63+.5B9gX`ȬTDEN8A9p#9')_]+VN]5HX\K ^Z<후nɵZz{|{r2K@R%@U[6+ ] (L㟘i*i_K^s5叿s+Ʈ}6׉Z`DLRlc)&Qj ^:3A˺*U^WV(gRT*5 RYTj̅E 9޻IZCsaz7F.qBP'a\)=X-ߘ}j '\1]fXęƃ[xsWщ,ґ \(i,RFAqijD1 Ci\|odɲ hnQ^8R'nu0)cBfbsďYKwiJ X:!%тDYrd`]]-$ITuK1 ~ kVY/_V3t:(Q"0_`)c(gp-2p,i5#12uab^M,[TKT5Tio2ܕ_ .!@Q阊 (=Ȥ q2µ .+ o 6 qea * L18@)ZD+WŐ@ 3%hU0 ;Jc-*E3îZ:%c3HAei47/ؐSKCM0Jj%PkŤ@v3d`EtʲT"NJ1&ֆf枪dתg cx$t*.lһGzZ,$>݊f1׹lԿ!oP*33FM3iLH `Ve%Nɢd z~n?o&3><}Q\,#WEFVn_dwϻX]J["N%C@1ƚ*bM;DPiuͳ*<󧌬̟ r% !_< DrZ@`$ԦLfр`x%^x5ik]Pq,cӺ+=LH ( ?#嵏m\ۈͺҶe(.(J00:Vj2e/۳U]2ggJ{8_!!7yȽ/q8 )׸H W1MdE3DW&"uTOw!4;f)1oлBgk; 9ɮ-b4HDjՆFriJ]3O&%"ސ tFb^xF&8c)> VSn)lXO$NQpF n}$Ԛ/[Ter 9c%J2N[q%, Nd~jqJa?`E !׶O0Nc2N[a$S~CH%ۙ8r5g[d 8Ք9Pj.ЀWVQP)җ"*pڛa~ג"ji3] T\܎0[|@R044iE}h'ݮOR#)Kexk[X ;o-f o9Rg\I)dKE::caeOV_f"ߴ|'r/Wl(Ŋ"ixL5Z(\v"2tX Clg ՜3D 0;P bSy`XIC9ߙr5쩆G}+Bš)UŇP/@Gϵ+8\}>K?p{ҏT5 ]{b}-֝jWOg/fF++m*@!yA ^m1 HOay;yT/1ԀL@ЅNeF{T~op E>cf98Pڟφyw{߯e< }A%΃i γnfg^jyjl@!'χ~nuZ4X߯awFs1;fǏ˝^(e8< 209EΒ%g\ I͍˭ob{PjxR{Ռ]VȏޟɥZxNk/|v7ϋ!̉kl͡ڟ;qy Tl_|z͝EN)>g/5)=l:W}gZEz\tA\0u5ƺ.Cv=u>@e=<`tȭ~>_՚WFj*d&f8I[tBB?7L oA'=9/o0B:Λ[^6xcq,?Ւs5Ze)8T`^#`v-6߻YG;@-0~e%݆^;۠j6瞀RS6r:s0ށ1 ,P4 μʘͿ߅@tkϞM#gZxp*׭ܾ#mb) LXnA2 (}6@~5m}P `Z}Lm,E ͔d2SoKEudrs<, gAѳ׀f,83rkq? yPmjb]z#$N90HRvly+Xlnjv(hwkfpwC1ؔDK(!e!T(c:˧qt7~0j2L Ǩ-Q&{a8af)=FP,I+M}b%)ܼnY]Ź=X[9@ 4ł)'Tf}Mti@(N(LPv ѯ |ݵ>5`- eY]ng\DwsG5oY%)% @/ø3*a7K dg28X@&>p#?PCF]]۵昡DY;?1dLjp)/D缤Z-OIDh"Ĝ0!+/Z$XƶSoOi~3t9QoF92͝X$#Nq,Yw;űRjqE @\U;HB-]Vy# )8"J &8!LCRVJBLգ6F""= ߵ$4 v-dOJd%-mn7ng)\oK9h[gS^2Jưw=S< $?"]{=/VA~{7zp,.KX~;CAӾNWPkB,UEA H&FY*=:r-)"dez-ҩ nM:%-)SV%-)nIq7Gqx(aߚL\t:9ODF fgswQs. ܷ0P5}$Z,|]/RFi%q+pe^<r[da'hS=%%*yPɃJT.\wq/m*c+v>O8 !sdJQA9c%H) B+A?5ޫ@|}[ ypoynBWo+ӷb܎˦Qѩ=cBIdL@E[BB" ptRבz7nK8e a<D12Q0Ff3R2+2-Ց*YxƐO a | .c 7%)jN{'sg)=Y283<Čz`n3e"DhdxD泄0O\ Y5R-lit ;iav+շe) UPb/"\NzJHãEx"Ġ hi=l=Q}aV6| _:ԳhS_'u)2X5?n)5e ֔kns).4W" -fhƸ2v:dX,:c7Xq6Z [YȂ}" Sz =WP,)ݷ/5Cba/ e5~DX8eb+jv1 Ќb&2L uYBoG l+Y/ySЌ36T PxO-kJ$|`ZXX,RWQ"r WT(EQjR BA OnܞK8d.V7CNC|K#EJB_y Yx WN;|v)ւ-Y`;XId&Z0LAs#*Ef^|Wu*rD.RLVxԖ6̋6\P!y_89'qRM|}%ϙKt?J^>n3oGϹ-icĜ|.-oۙfPy%,$\+3p7'e᭡J{Hvj~ta|ً4Զ/dpȄS0cQ7A7yB^iQ,(mT$LhN<L!@,-DBF% *oSkW }g)q{݂j"[z!/&I5ˬ l_𕕜`چ]WWtBlg}dɠ7Ao$ޘ6ӊΌ<"53T9mx8,92 +s̈́خ9xMf8E#))NHqtbZSR BJPhY0)U`CMpO)C()Didij::MיEz\lC_ zhKfK׵3*8ڌ3@Yg !:`dA :H1P @i E3%. yZ*#҇Wz$|Z`,~B=\f;A0\XbdF;ih"J,V!UQY*}@us  b4&8j ÉuF֕fa(+lCgVؙaX#Ka,5Ȧ MkQ`fΡ_|hAhƠb2 I~cӗ ( }N_rSZI5\'*ee0sٻ洑-W\0{Eݮ@l63reD$Hqic1B ;3 >~t95;.rv'.wr=juSW.a5v%=}2 +Dv1Żxg=+  YX'ɺ 2+*X$®P)DA<=.Цj mE ξ+"2@&0g`˵醂9`'Dr@;rEB=_A|iLIB5ڏ@ P R#]8Z0 jZM+P'!(=_WX앂v[j*98OKy = ٞwK1DI>CE~)EXqN.!%T/}oQ bDZxfA;)^qUЖ8`c 5pxIlfH)&rp`|g@j !U.c cui+ Ʀ&`#BK. 
&쌔M:1螳Z?@O&3/T*0B]'Bρ#\j#;:a$)aIgY""L 4O hCDGƊE8"*v7g W9_O?Dr@T9&1 ;קS*D\ 7>cf {chBTfnD xd3<ӄUěw v!ˉӻD |2㏧aaF=96QDTfѝ@ z(daf|7pr(&X41&e.a#n4sƸ(m+[P(D$~Z*qhTbf0r[%uBϒ"EԠ R mX)!'T ɩ(!r%BZb(SZij."'{r_,p9NJ`R>)`%<&v,ipE8cD`0DJqO+(f$+VEFC $e~Yb%aK|.p8ēLr%av"aEw%1V`+]QK siy^n[X(u]0x^g] pHڅiA"4TTRa>@;8 Yn;ۧ^/3(!z/_`<Ԃ@)ꌿ-Oב}ŕ5tp`~ROzx.!r5-$п&܏13NV` W)sfni3_S|D^2S?'{<[x垝M?bܑgsR5[A 0TP`f)_f@L(R*s'(FSy6rWD \/Jv1/%2f`N,>iֲFER(JaD@#RGYgX3fBWh#|2US[K_ZE^%jFH5RD #=RŪ/U3Dٱjfej1XucRJ2S&wYؚ+5B%wkU;QMAC-f:g $ݼ3|N> [Lov[ݘ7+\[Tl=)'zzO֙m =-0[cAlEp}#/CEaeDCi2ؘ-}+4ʣWx~N}j޴__:]3ZV.lqf(٢ j,fYO(hz˛z*0qk !ب @p~pplV< ijMe>;Qo` $UiG&q0;W6͒b5hzeRcvXC>ۍzG}pg}~L[az_jwzW k;V:$`6UzԲ2Η m dն0OcK>i޴~]( cN/jh?+{='ZY`u  /~Gv ,Kuy{hwx[xhנ}|x[6^yݱdr`夢]\_l[S Zw|>η4L27b$z'rÜ$sX=~^l(a lvnmuEeQa 6}-P U;o2;@[ Kp $\լuwmZ|-;,[U[ͻ`QZK{_YVFȖI\nV@<>S=^&j] z܌ z<ݯ.`.'x-ۭۺnY]nf`@B 1BYdNCo;י5Խ;{4K-fkzQHj}i bѿ&^]<3ked=frK.:nÞWS%lG kDʸZ݋veƳJ!eoOK qTw:FW 3l`c3fN0ci7z&iE[jxݰ5$3*~6x͵%^q*k[6Ϳk>.-on1!q?[:jxgn&8fFLM)tA2. VNݚXS+5(y54+y eKu5`vyn S{_N5;CKb'Y5DknYSS{z8KVQ50~/' {,x]edvmjΊE5Fm{  k\Y6 .ŁU멨VVݽ 5Yfxvk`qfw$FrGlzVw874:mkͫ`Wtd2[8b-p?Lc;| 1*2^lToW14 YՠױK|vEEpC{ k/Uuo(E|'-9@PKCv94PjCvR,Iߩ7ZWLk#+&y{t;z*߁޵j dY{ZAam{X!U^?Z K|3:GPƂrZjZW}^oRijoQ?M.| jt؂wJ-U|Rq\hͤu ugq2k5வ͂țdW8,Y62Nswi~։}0_3=qȏ 4ԛ ?Nzw7 Uۯ3/34(,B~z1f^(/>1YyL~|!R|e,£_ t^؛MVK O{?M$6ouų[ܜ~i2nWsMì?m}g h,jg HgӬEN3zJЧ0<^lqځfx,d=%sgܷ r4_/<~UUF|Eٽ{qzd!(Hܗr/5 }7&gxyŠXnEMz ɹeg i_qncdhlLB;/,SDNG:AM}.R\ `ުuG۫RC6։#tBb3InWkdwguff ]c' Tl/Dx9zOODpIN<[~ UUY=}<r]sj7 d% O&ɷto^!/"M@ Wo,LeAM^g5X=b>`_q:\q]O7 6]hq KםW6 Zm$kNmmyxJiV?6%tni1_Ysٛ1+k&d)q0)ɳ6XmI 2uU:m*7l%噮p"E% )NSԒ*=4q22$*I{i*!.K@T̿ ؞VbaOm5aEd-W[IerTgA`)ENSz(Tf $l4[i=I`*#l0dT2#ed [hDBO}X+0xEYKVn68#EUJ" acU|ipƔL HH&|U1J 2Keԅ3s;,Ia̪Sl:ٔպ(q{[n0L@qD(SPu0MlP%W'(BXVhQyNGL&WPx3ewX28Y]X`tTk*UAubV Ϛ} bk}-Ga*N>ܠauYʅqhi$2) q젢H9)# Χc'8) t & KɲWm{{ebipye6]S%Ejf M` h c$G+L:)O>JSΙ'Vp",C'p ^ c%?jb 3J7*h{ryNL\~^ |}[c̤DDi{([Ivd!d0o7)yYFi:N f*r )ɱk:9YՋ9i a5'h;V)lN0 d'yY:=Dl3@s) `$(5¸WZPm8uTiM>4G S,#Q͉zV*dQ #!Ќ@JPrPCK6 %"{F,9:U-9:ة!3#j>JGaad n-[SJt)9p[R3I %]ph㾭c8 )AA9ў+ֲRrL@CHp2+a*$ɼ|>fK)ԙ<1c2 gr3Ԧ+g9=cf?21۲AuMCVY.x)g&/gbiu>dsh(@9} ̢1XqViZ:g-^'Btf*WZ*MvpF^*oh P4:֨ZqH:ҲW8Y[Z]OZ ryk] ,M}cZLZkXu-=c3l]4d vFu<=ImJ&SQt^>Hy効GPMdE˜EUyNF(9@ׇZHp&b珵19\bet4d^|Z5rt1& fY[exc`/Ƃ͔RjI0VY^3se E']xb'Ĭ4N8%ǘRTl1i;U\$^ 􁈭a-FVZW?] 6w@3ȱBϗ[%]np˖Y{hv2 ܮKPcXw[C81Ivn ?D.tjۮXD;Beգ iR-T)`c dafqP0wzo Y$ ?Et1=xk 9@~tyv"p}v篪/47tU~eǷ?ϮJ$?X|gU{bBzC9>cQn>eSW-/0qq{ƥ%Eo޾y6^|M"y{@?䛞Wܨ -o.>?_f$4?*>ƻH xj^}]aZ5=lj\^nCz_boÙк3o}/O ܬ)ٻ חWd]z`;2iq+!;/.*Yz"?Qۙ/o;?\&gE,*]`)@c1Wm>>-]~a'$909,\fX<}I" Ěu'O>E4ш_}((E**0()U;,Hwo/_VeʓX^Fj?$a=L^T-KN_ ǟ$"rDDxIU~g[PcO>I[f/n˫S89lrsԳ~= CC1='9'Pq-i ON my_kLۃn@ i/pҵ_o |v냢߽W2QQX8.3d N5' P;'V_,KHze9{)g?—i']]|vj=ͺ,B~kyuw0xjˬ,ӗU߶_[,=ʠ T!y:PRP2T WD߾.B 6F/n{cMߺj5m=izJwۨ^~=;m- V^^*9KCQ!19T;֩rZtXw]/~?DG+gaNvؗ^;>}ޓzq\8TC6/8";r6atJFKTɕ*"SP𩐢 EL똾T5DP*fFtT @4WB)(P}m^U95PG_flQsN)ViUýys^#J2>@lҟ2Bd@N {E v֏ %%9J"k:Cvh{|_oj Z9΁/"kcj¯vHX:nWk!k>ph`͘iv:J8 GhI$3O<Y{~*םΘ [N헌I }$eٯ=v]A7'x6*NWr 5*@Oӥt !:0UIC*ɚh*6 rhў<$[ \=% Q:=m 3. 
v巑Ӻ:6 Ct4,eH\pBj;Z+R}MLmDa`d􏵒Qٲoro5+zH/Z.y B*m yݲQH$CABYd2MƄQJ$, ) *[pd ehtou<G^ڳvZ t>5BI扡#:v:"81dԘF6w.+{h;N'Dq0jxyb1{кVo3vUoRVcl/8)TPBP:'5eJ0:^cȯV;GO3 GNx{ƃH}^oΰHu\^[L!˘m:0g>FC3Uf(d&Pm7QkUm㎧Z HsfyV h*Ҟ[̼&w!x1)d#fYE%d*KJTTVo1 1 l ojւԞzӫ;6h1TZq!M׈~F>g|G%+> WR^]ZEx%OwCB[j3pX/X%|]mHDXX)4LA@tnnZ>՚2f&LV[(4VOzȱS"L(`bʚ *pRf;Ph9T4%?j_!]8JM(|8 B <륧 Q̓^d4ky'CE>Gr(񠃵VV@J2UJ"DpMYeDӲ螿 }7,M1sZg}ڧ88ixs/{jaq\۩i@1ZWDFᆭ (tQ^ %;ȴ}4V S3:G/[$5O# JU`eˢT;a2/pMqkɓ[=h+<0^ =£׾zǞɧ{ik.9hg@7T`Ƭh-G* =֐@4>M6Sjn?<F =sxtO.'4$Kamʿ_ٿzv},ǫ}7q|E]cQ~eY^ĝ/JΊUDΧb6$Wyz@=.4@TRc0OFWU)0X_&B.|Ĩem#i>8H}0J+َ+He9q L&𰭨@(2?X ~ZQdd2{ ւWxl7i6VT .`bq %L` f 6W4ڕZx@ZB]"KPwDyBȢ˄$bغcMYRm䏉J01XYI"HDy+VB3 Ox>"K.dn[wl!H?qi>8O_RRi)a޺^d-iu[it٘22D Y"f9݉iG ux51I2rk̓kT1 2g۟ziK63t?j㿨/ԛ_ǾK Iɸ]|0!^I(,ŎfybQx:??Υz0 Cʛ<1PxS>[D"lY[e^W9b]EmǙZ3̻Zoe ۧEv"/z8ӕh$l@;tG *:pwaxLb h:}dS-z2` ~D ۼ!~n7B_qA^LD~ڟ )_kiI h3ߎ#3*vsT*AzՉV,xާ~?tz|N;wsBq3k9?@H{` ƇΠwZ'o/jb퇋%=| џΎ'?~~7vr~kٝנv8)bN-Z߼nږygڋtJf\n/b{.v}Ƈ j WAo/P;yIi^'wvQ8aDz[(Hn5p7L#4"wM<7р nOF4tPPQ#+o{\k^6zg6{ sּj5OZa)~zՓi^_ CU9ok/bLqi4XO$^'N7vПvRWƜ{tW!楀z:0#=;4-Pb6PG"sCj{7~o r j/&DZ&F7(H\pZ!g޼8p3lVx}QMF߃G Bh^}~250s={w f;ʛӥd84w1 ଙ:n٬1w_Ww* eڅBko:o:kŹ^ܸ97ƽ96ySo߁Ћ^3OS-ulw=Yc ԾcONgP.rnnvPLfv[ΗI-u7ΰ;.4#N_Η~hqܹ{dUtǽN/rZXKVf~ \V!RFv2,S6pg7rwݬ-rN~k@i^ڐxNa~n;}>ϞoSt7QZ4B֗wj3ޓ^#2iXL]>`/GsGD9h&x\@i|xha*?ӗ~V-X턱q$I ]e@+i" OU ‚D2ء0Y t1"[ʕpf~#A W#kOPgϦfjv7g_x"W`BI֟md>%{}`F'Q?_eթrA}3G?0A?J$<Ȱ9|[vA+a̸;77S*6?  Nm ]uSw# !V]!Mײ!;ҷi!,:A\Ɯ2s vB<;暞by6 N!tt `k j3,5N#g&$R xkwU2@55n:>>;[rc\")]x9^/q~0<$RHn-OI;:^GW9pMg𰄇%<,a KxXmäՋ%<,a Ç i 9=?Hxz㬄wM% ,a` KX0p0ʐ+a` KN6 '`P*Ş=No N[۔J$r c x^&R&1h/.Q췋*eDk8AZ P%V0qCbB5qsI %L25~)rB>qLYLJꐑ6ބ1UbLHeRIabJcMDˆ}/U*i>MC nLXw7N4~fsK p5+dK TЉ}邧B2*wd:*աUh\(5Aʳq#<9'S# ('Ԅa%\;TFqhD3lp ӝW-։Bvj$3Fe&$&JLȴGhdBڈ'$RGJGI@?5Dpӑq?ou} ݙiA;(Ѥ3̲ƭEʓ˳q1/U*R 0CȫB}Elz"p%Xٛ`$,&Cƀ lLB;ZC%aL9[U(?:#FPkؼ@)8j%O{:2TTsVW<ڶ=r GAST;%69ѪiM(?q\ Iv r^ZܝW( (9JpF'`d7XH-Ov ,o]SBOU$[" CyJ2b2G*VɭEşnT۶ܡ8g:i2z)yXF VX K"atH,5l"IS(Q26":1e\wIp6̻?S|Sn(N( UB(J1ۈr9dĀ`ї HD#(bEqvy^s>hb NY!SiUIH!2 )08 _8`=*O*wlp8ie3b W9TiqSNgP-Qʰkܟ華lц-d$K96F߃'>P.;4ϸ6.tҜŕ9Nn {{--\x9NP Lܓ2fHuZqvY_b40S8-LRIh,1 #K,c|tcSGxU@r*.! KwR8#\fb .mb"2LO,N*|[gVRVg4 UF@]ix_c4OFcdBf,b*ʈ08K"wB*6(SVTJ{YKRi!?voƆc `|kmaV_c(-*;_s5fs Ř(t4$?? 
2p^6ZVt5VD!%'p \\i@,ƱmRYP8fk.*l>,inba~S_,LB*ĈB8h, I,),,gsNt>xǸC0%Ÿtc@ zf qt;pۨp+c-!M9'ǧ0_.(扅"1F5^|InBxo pJ(V-~l[af޶6W׻vFK_z~a@佬oVۦJQZo޹Xn/?X 7?BXk-̎O]iQ]NwӸ 0:ؚUmMr =p4b]Uehd]U\)}Q)}yG?oF\I0J#Ɗ9cy/UU Q" TW,6JQk0*C(}_Cc+0& f ^ xcNpUzc/mu&rӈ%7d AA-E#Ǹ!yiJ[F{ $[YiqGuւL,84۵ S3TkX+R(nh!Ck567;ۊZANF hi7` aJlF"2d-ś F(O* ~֔cL⃨Ry]U*>D?J5Y[A3R%>tf@Sttjc@n9N%%թ⃗atJO ~Sߟf)3ͥQyBvbr`dp۲iISa>\p \vPD%+Ghwh0Z>]a :\m_Sb'ΉZ-vq%_.Y;"RGgGO.~2gT(a4ɹyF .cC9|I צrBd~oWy;lȏpIG1'CF\݃qvpL(ƒ@v"L=ɘ^BhCĈ#BrѨX1=6Ϙ I7dFG1%~~X_f!j50gNwA~&vUEW_n;80gfϭk2&:O -ܕhi.5F 8Wg ~a~ը3愕OV۩ףxܕ0 V| b;C9n?H(BpM}׈o WXDz\]~\\!ULP N.oea'aX(X(/m(Xa)8khS 76e^ՑKj];ϻG<uE{ǥ,[FFF{˥`\Y3448bLH%xJT-~߄ 6mcC;[v^zA--뛫x6o#jyb!^.~y~k;mN6dKs'.~4f-~7D&^/Ji9A ;ˡШ]+Aj`MejZIF[mD?czd& #ēզNf)]UJu Z fӻn&f$I&EcQKIAx?SY2C)/Z>i-Mxkh(m<ѲQꋹ+aO.c^="cUd LWOsb^+4JY;p^}ڊP^}ꑽf)q+i|¼z ןHu 6eH+L(CʳfśfphSaAI:8n>UVI3q>U l82| TjK)lD֘,3;Sg }7|:Z8AH^S.Ih{$ i&ʧzS,SXpj6T/BB1̘Ve8T/PX΄Qs x"| ԾlitI-anoiU)2VԵy%וྪ5GmelcJݤnqH$B)Vq;#l4A@S }LmBԲiD8'5hߠk0(R|^h7NcL}Q~y7:\f܍^7[K_.}ss7|C$%?߽5z>'øZ6ό~-~EsvuIۛ_ޜЧ^0Bι^u}p\\H`mQ*hJ}?B!“m7űVܬk1Qp$-i^un+BIlhdRFJQJ0f6fJ7Vr 3̨* Y|h!dقGCf6Illv[{Jsl˘Oh*e\h9Q{ZjGPV>h,P.||t$zC5oכޜ Y@R>M{qO UE ^IF!G+k1hƭ 7U40UhB9_EPoh* p2}YĆWK#[2coxݓPWZ~=P ]+)|0dY?ރil x jCS],1'BaHQ׫ׂN,}GK˹R+kC0|p@φ X rڒQύ#j]\!bMIs蚲VLk+M+!R%GW, i]עG X8źïF.,|1Gr#ٴx-ǶoC/ s+2-=4L0pޖ]1)1PZtoMa dm`aC-c=`5qS9'ւ_I494G26KtK3yHQj.\ #0MAsYG7SqfUޙmA R nQixNxCk 25ED5kj-E Ĥ]o (ˀUqj %oĵe\±|69 IUMWRk7k"dR'}bh.q+=\^g%n53_-hdGvgi>^zfׯ?ǤSׯfP05LWzTH $洘Z,iO͉h&HI+&X&nvDHtgH*P!?u%5fM5d4=IvK;Nڎ9f:/Ҏʵ=%PK*֮VGZD.>"Gi Ɉ\HYbZwXzڤ> Kd=5f4ωH#J(s%Qjc,SغĒ }LqXZyd#`(Aυ{( " ^ټI_aє)hRECuRQ\ x$dj+\啱v~ϐg9+zdz]i`Z*UcW( Ƶ[12lJJn o$UhigA N7mFGWb7nu]oːT&wShcj?]7Dvk d;fdYd[O.nf?9l } {mzx|Y|q}ZͅWXb{}[ۿ^̪~(gO,}[hV;s$˹2{hK[? U柖k۵twmr ~ GԼH*MkMg"))yg9_\Šf 9I`+IٻoN0~ohQ6Z {!bᣧpeS\_cpPRo@)}=a KXr $$O:Q菲Ozcg;F wk%&W=s!(s܊j5fQ0Εs\JK&hjJlE)$ U;r!}()Rv}0F+)ܪZn;?@׸tca;.`Ws5glJo뻂9* H_Zm@1GfDPc,RRĚW{j Z3̱nj5V7s7#-/|Fq r qBN,|,xK){N)%mg..? 3>¨uҲK_6ce2zN紁9elb+! 
njl,WXz.Vi) qE=4A1cFEAT`TTJ~Gmac@L<#PϥK0ݼ}7z>uj̛_2] Oaj\G){`V(D,޺~Arn榭r?~Im&;E(!yoqbC%IԶUeW}΍qU hl+BH+WarwF)Ʌ JOSN.L%5C 66a3d}++'-xܲIKN@mܬ m2]62>Fh/|#X{Jڕ3'B2f:oųٷ?qLiiR"Ƨ&PatUHfrB='?S2Q2n7dHHn<KiMLmdt pZ&+,M+~x^jp!bw\M,xd$*ǂ4yZYUC餭')_80F)/GOUw.r|!ҕ6@l*)v]*()FSju 5ҕ}?KǏj`/[r~b(=/ϭsUwg]qugbٯ>ޑ]#YFO8$ETA2*BUdU h8fA1"V&H[O_lr&)GJ@]Jnz0|quθÇrƗ_(K!h1RȔtAN"Q]Q[]htt1P )ܑj(\ %-fNPbr7?VTbp˻ 0pS=q0*&#`zbEc Ztl77nE{]ﺩw(?O;;Pvۻfm=unf?Ҟe~&yg/ǗŗZͅWXb{}[ۿ^̪~(gO,ULA7ݷ /ϭr=4YPȨ*O5۝՟8_^XP GԼ+MkMg")Nb7~~wLAƓ 3 JB.75J|.yjf[[U%RPYn.kТs܍,V*[QV5P"`adns!2ՎLIE^/ąi1rz_8ɛ΀FIʩƭGin6F&`NzܒEje+h9'7UV>ȼôVx|GҎ!-z\ZllSa~u'~xJ-M5vPi@RmǗ{O㔁h=įǽ)=Z!Y<ʻ(Yl74ƴW4bP⩏SB$X+hFg߶AJ1>?6]vfn+l.9ӤaE(;Ѓ8v|>q8(cc1e/F56-]t<9*y$Du/f6N N5N Ӂ!jsl>'q-G,s1 85m;yN )g8+2nΩT9ZV#qYPB`.ȳԖK%i/&v G 2ڱv3ȧDjQY 1yiJUY7h%d0 QL;FA ?y]ƪDpX,Qׅ{IC* 2kҹͱ̫LyO jqQ/Wm*v{5ˇ9g=aVuєEXkD]Q(QO 5 Bs/BG9`h`挓l3 |aT断A醀@0qmͶ#gbzGS+xnbpIC p^o:p=?d*ӐlqSqꙻjlAp36(2~~ G11tSĈH&1DWdM[|Ni%e"rtǩȧV!ZGfzF~ې.2%xzF/VOG'p2jsADGMJlm;m=mb3geR~6UDP Ђ(Iy - ZN-u\~Op[uRꔐ@%0t`%KA/MOIZa4i 6J ju/#VQܫbhl[9e]&N6ukr͌4 ,N 01 VKx٢Bh|z W/7@@ Nth#魊XRe|.;+|=MR`Ogi:q^p T\uS'Vd_kM6 R#v W(7Ȼ5rҘ{$Dz9k /@ab&jƄ06&"،Ƽte sWEd G@Ͽޚą(dF|.ݍ*D<rO~GM{zb !?ձJ8艥D`ZP LoI hisA"E0_Rkw2A_66K/K<q'O J)8~r@HīѩS˓7<fbw/&X'Gy;Vvpi*4VԴ֤Eka5imcXHP H$azhn0x_߶"ʒ1]4 ]O\; y {tH;(~@;U>18ך3i(Z(˟~Gq%rع蚫is{B*.ƵMbma40rPyqM"SID.zt焟Ur=y]9FXdˬERqbFe2bC] Ҝ[L#e5Bj} chfvS<:;ϭzI-jٳj+DH=X8э94ͽ} !dv5*?MB(ղV젓4Ai[A7Ibdik!E`uzm\~V'S۷]]]__98<:9GTZ<={uyx}t~}X"Yhjrryy~yr=@0J;Trxq٥U"@*][jƒV hVQ{ lln2|.6Z7mS)FNO\Fi4ys&94Al^@H{pRAouoG2`^kunsmCv>|UUt6 K4d[4&/VOF9] qfӋj{Qm/EնFնyZR"JRޥ:&E wtKTLV(4H7&Uϭ^oThհgF7 ͡m,G15 RDB=EZ3e5Xs^xse-X炨vwxuczuwX5 Ea a+qo~t^om?v«r>]x:)Kx.S,kzәaʅ7ӟWI;eChY\˒p ʴ0G#@~DԬZ)*x(:sAӭq @\ p\z\ Ə9JuD%UA.W06uݡݞA]=0£%ei<¾l(Gvmߍbޢc1ԍ"VBb.uJ[emWcثtFְbZɑZ8@>zͺ5Nέ6RzU2UsO9(-RCeYd^1Ֆ0j##KBkP kf- [L, )ϢZs=?kΊypVHzƹ 7>ۺk|nGA'6:oY pPPidu֘oWM?5 S״M ѰwGJ0D: dڸ*m=:'w:`jnA{Vǥ|>H>n{~8=wҹA.'qt~~[FG0 o mTti4KnӥkMk0u+q$LA֍pIh5}l ֈvb|SDG#W&Կ7a?K{w{'o~{d Os/-pҺc8R xK.Fs?4|_#[ ޫ Dϭ}h ?%}={?h'^d /kByAHg.+x%7 G[E[*")&@n}kg]&NW]9@{̃vkSݡqfʛΏ?Œ.[N|&Vg`J;;fߌ0ge;he(t|K6vx$ ROƿE{F&#O$Vt$y`=h2,zn?wU|[۷}EWSZxڥqs75a';~%򬒴^x;'k'Y>am6).9Fe;J`fبLgĦP)zu΅S E22m '.:ň<(g 0!."W,o-k6˾\} RTu(_tuI#/& +1-6 -֔N{z+%cc /|pzS1b 郑Xp:m=!-$G1E=NX*XH'Ps{͸{/p:;^92q:]ā;G0cqژ==NSA1R݄sٯd3;jڳh ٵdJ- þ iJ~Nw#_ 5 ǚny@OHEUC9)t+}pVb s(×cH7-˟g΢%x QΕRe ?<(A\ǫi>)ikFQqT CkhiIm#ziDΘ.2N˖XӵmlccD f*a"L@$ٌ.,0CoBouHDwS4G{qh@ˤ?BLˀp>ʼ=Ng4P !POڠ_k v.Pr$ v 뼀:k[Sql@P"$ˈ D ʆShJaˣ ) Y彛M@ioLȊk1ϙ]4hY[eJ`^fJ(֝PaaB'/k| MH&_0:AVMRl˫^HRFVc8RRH\3(K%@Řrɐ9GUH2i!*D0WD뢀kRG3J\ $$+owK+[#AZc"\~IZ4ٻ6rejN yڧ}-W7x6q2HIqZdhJ&VI}R\kI˾pliKvlj-P צ֗V9\ ֪[b49[2g sqCRd]>s꒩Rf)J98^U]hDh;S3 c<?gNŒMfr7g [}Wb{B9cܼ~&܃8Y) 98L}8ٙ˧WypnrCez#;xgeCsc{ [|ܴ}AՍ/:`A?Q!xhx1Kfm?[5:y|2kPUANft}"O;bN/32\D)VBr`Z>k:tC8qX43Z7gpV45j1[j.tgL'݀'5m4}wK43}f+y_yNsy_9oؑ)8Oq Ys9>3j߬7o>޾ ۇ7pҋ"6Pp&: 1̰<:sJ@/揙>h 4m0% $g4.]&4AƭRpΪP*9~$\'7@]AN DNta}cpMMhҕt8qYztJX1%DcdL6hJ^sӥ嵭><>w!mf\=ffUm"hFGw/O<  'Ihk,uzÍڤfns6ywo\Y|1ƅ]Ƌ)x!(ReJ8XPu0M;lFr!]'v[FΙĹ2O F#C(Tq[ #› oU#q6Uu"6( X72Dؠƺޝ$u=0 }ňgӲ197UT ԍĎ1BܨR_ EP/¸}i>ژ_gh1V-1^=_]non_]X#VmL33)N_䉕O*2yv&64:pp5l qq gO/D#"8g9XqjBy[^5nk蠠j"<^b}MQ89;P'lS+ 25יs{r% =ux2kwd d#{Wh鮾󪧨/wnnӗ߸߈uSWBw֎7ex ZR ng5 εOϑan`-?||KۚO1-wtoC& [X@Ǘ)\Մۉѱ#BӖ (f[E{rooPBӖLͪ̇35Y=6dC^5o޿}hΒ2zV)C(zf61{,@W-Q$zRQhHV>yWM4Glp=ʋgu_sS&E44z$]ج^4瘜d+ߟ,qYf]',8 .Xf~ECԦ23ĉ@w 0'`ee1q.n(o훶h4utNyJ 15_铹c nzpg8CrnVkfolO}ѐnq3]9h,6f/.CbMռazFPJ_F/1=vl>M &G$1֌(E')9g+4cgߌ 7BSϻ0zF%^1%8] ,uaN6Y96}&d4_L~,ac_)I?%xg4BY#!P;RuzQZn,f44?6Z@'r;ީ'|c|$dxĔ)*1?n_R*"E|4NRDdEM Sz5 *r\ ^.WN}8*%&f ^!#EM}4xoxzZ/;rhXf3'cj_X %yuN)zCTg-k˵յsS`F#AJ7D*VAhlvU^-ׅJՏwwQۛfU@zc*,F1ބΠl\딳qr69]E_(u 
uBkkZ\(}THlR:Pv)`(yǗݼNݼF7vWUm a\U1;_UD s)쏪;W>>4ө]B!xN*NI):%^w m@Ю ^R5+[+1ʋD̍gI l.HK?\m4qk6ڟʪKȟ~ ~jlcDalZh:^C΄`kKmՠG尊8$B#cA)Y-GZ߶QN=#'}6+g"XUERT3cZ1vh Fk㫱J?Yؔ>8&>)ٺBe@6`;cK]"-;̭uTs rPv(XAK7 ΂:cHB ajVՍD.DuW3G1(BiqMi:9 zbi뭣FlMeBR_CkH*ddmFG#S.4m!_6| q1'-JN G/6fG4 Dʴ:i!Yq)Rx9"e ۸"ӗVmgG\'LH>c`H  y,Rj_n}WCWwf7:F}4ع~X"DYqu!*#DEh`y:JD(+Ƚ.B-VVs[+ N<77>D㴍I"4(@"K8/XbpK,A*+`)Ua3 o %=Kʁ]؄+* 9)-W$9qK)JOLSﲎ'5Sn"NzUg= 5J epץ8>TE;XL }*P#Fi4extr{ƠH=ދkPBS}j1{S:&R߾;kyyw_*YhY8OOx QKB;AW؝:q%@3d;ZPuߌr ?BSr L:)xwm N yW/ZtJ-Nr4pQMXٖY`4IR@u8,Ywt& i>S& !)!11JQ^ t꫌zkDz`J ^Yba'B$Ewij#^˧'_ۧqC]>=}Gᨅ NW6GOOpOcz YA ­4|9*8Hh#BrpHpTHp%4in4pl$POH֒sg%8߇evmҷ_R&7'+!jS凡CGbJc2@*;Pîgtw:DmF]܍'~cG1uEu.cdio !gZ"n2c}@039. 2͗]`'3]~ŖdQ/e[dGɪ_XU,VIyu` dlؔ.8{m?PfM] q6=C*խeNL}VDr`U;LCw3 XcyZHwaB}L\É}mxK3Jgy$Y^~I^sqOZvYBp!Ĉ^雳w?BH=Fttv4:@TrlD(:w 2b^NnX!uNvYB=YB>v& #~y0߻͸'f0Iss95M:"fߗyL9vǑӱa15,'y:hE?NI.*\ڞ-0 ~l6|wk=!h.ⴝJ՛1i̒(S4:"eDE H D"H Bc!ꈔ]zMKH:%h\ W`9r,AOQ bAwz4UwBQO8T3!Xد]Y"G"#&#ț?-;z(7,_X 5qͦ M'拈s|l[R0ɒ\TeL0K,a!罫r˲y: @\ǵq@rK&RxpuFfDOxu"ʱRXϡy\gTPQ!T;qsMy B`y6p~?=Oc8#B5#p@Ǹ.:^k+D4(?Q]Xhq T^ۤ"Kf$Htsp>*'&_f*xg=v\aeS3}fdˋgXTA8ꊄUlFYv)ωm2/"掉nVQњfBs0$c2OTJ،6^|s>9\7ʊ)[xz&҅s Cu4pۼBvxHJȱR'+,eKqr6AQSYE:E(BqS\ -_Hf&XjzlZ+U+dW$QߋO0m6ӧ !ffqv1KtH4"DGP XO6ڒ??,a0bT`lORJa|/]wK5Ä O׈JP^/ u@QE R8@Iʶ#޹dh$/w!H=BR %ws "l圑m;ci\_,%be#^<-oa`99[m{0V$0s9s>q'Bbiͧ|υ=:5dz[RI%'Ƭ"O_-Q:Cs3u<xgS0HZZ ;^*N(Z"0}9}/;xM:p{?|0'hD%DlQ9} Bؙ^0Q '򟧏wV[{c,*ڠ+,+3XfÌO=66h{AOd 4mV;S*iMD+_oa91wtwl& vhdY+9M/?qKX)ۦeʽ>T|=sɟ(>V< -?]i{_<:QK DHt}VTXHE@&hp[Pc-p4ppo?:ߞ@:[kĐm@ C(uzVt  S3 ` ?3ެP Gr |*4n]?^Mp)҃fvm3id>dL:?X ?6J?cK>ak V`Tq;LNGC!N/&SV~}՚0'Ţc~e۷ { T?ckʾ7G`ѧݜ>Ֆƿ&>5G4kUG-ڮG Y|6skME4J0a->;FvL5oM_nuHDQ ᭣fHl+rM](Bm0נ&0ݎvS v*\-PrPj=8n̝;@Q8|Z%=(XVyQNA6].E V6#BЋ4ʈ$g$(4I 1`,r>\>A cĸՋ$6Z[z6V ;!v+9[嘂ٛFoVCu5;kpxfFowyPFY]"i z>zړmkzsޱ[U-ӿJQ2(Ӻ<?{rWi|l2ӷIffsSvÿIpppp\\!kLdgf ,&f8RdU!RĹ+&U. *Zz>V \_ >,*GtZ\@We5Cl+׊yK:.?Y)Z\_m+CL>#ȏ )6 )-3) 8E"ڨ2%+T1T&MVHuF~U㶆ﺸ9:XМhػl46c8 [g WHrv 3> aq&_㾗FGijF('y%'asOW "ݑӆ"`Cq3OA{xuM~2 Lsu_lIHc ?v1wnSh; Kf8sU%T%NT&3ҧ1ǜ14($_ dB?-|Hgp5 G+_۹Ył%89 ?+%*`Ԡ3fE؞my^G̛hk>T=(PͺO-8+Xjvq pYq6"_ɫ" Ř 3>y,.wzʵhMItX߈֬GH$#gR,ߚyj9KF@GL^jڒ#ȚPJz$Dk&P{Q瀮Q44S$K {X46ҼLp&N Β+l}>o#]=;'{>*?e^@"d^` {^r_nխPeΤ;ھAۀKIY.˂>. &Pd SbEu9,أF|VFunWs V0{A1wo~BbN)f;/$T&}MJѤg`P?KՇ4b]lqM3cޘXy!Z҆afR;m֛⍙,=EC^-ɴ)2-SjPxPV a͠AIaφdֿPg u??F^Khh//6Ln&xo| zoCTs- 6Z7]~lx!V'WB-WI!WQڧc=ƽ1ް3iв%g쨜IʰxK!PÆyiCB,V)Ƴtw5~UDM\iW :Fɡr6z#~B킦&wU}!_w m!I)JKH RL2<90\;ϰGz$eԼ^d\*`E|B`SJ]SsO%x)LʌJ VR)Q"B%2Ʌ\etA 7|UDRԈR^LRYd"AQ+)a\0p4#ɑl:C!s Lg˨.maӤ7qڳfue6.Rn8mU~fo* wdpG6 wdpGn7O)J4+,FBry:$Y"%H4}b;bź,6`OzQpEgCfdYzn>|X >NLTSJheZ4Fs6vkIX9<2dUlHLE&(/05)5{@q{k&R+G6rdS+G6rVntMd2{ F˼ey9!1RPawM(kr'+<8U:j'p㫝%!C_Jt;V[&JKݜK Lu|]㞗*"$lt[N!(Vn`r?{6/{J*?lŞJ&;S.eIN&4(Y%lR/nt73r휑k\X%r4"8&ϰF+$-r n vsDQ(JQ%S[wp:Y/ۏ_+c @O‡p&!ِgadnGeVrܶ,Fx!G2FR. }<'GFA)l9# (ʀS,Dʰb#l-XrXS}_QEZ_9@^^Hm@ K"O\FXJ5SCh-פڨ\%-y&*OǏfBeL8H)e*V+iJ1Sé& jL uţcJ vpLrk Rre9L5MLE$t:']*L( VX`2 eY&3KsPeIƥ<3\@("gm!d˴ׂx$9r).T vM΋1_g5OKSj mzVs+] W,'_7d%V.t.P/[瓥ZQwjEð%wVNοܪ[u/k1R`]y z.^UN B}sd4UQPR/ f#Tgm@OT`%P*b5 ).S?rLJ5G4<ϗyC(]cץ@sjpMo? ɣM7 U)̻,r6sfݤY.=l$NG! 
Xѫ;LOSɝ mٷ2(g<۰+GI$AGeXZaV2\kV&6[ǵW4X }(̰e,.uȰjғx9BH7:9Rb{C+8zȟK1$R\CO czȒ 5J&ieffKKb>x_!b;.;=^ ; Vkh,uxDQK^*£.a :&>)G79s_v?]ՂQ߇+d V=K~pη76qlf dv~3NWC;+y $쉉UOưf󎒂{|d/rUv6hv6Cx>K JSJzү.quRpIe1SG)zRmg Owj2(tTn@ ug p1P-Βa.2zB‡"0J0!N10 qq&!`FSSÕFp'2+hraAP zG^8ˏF?xr8TrQx')6Hq%"]>̺#݌&9dLYSM$xGok"QLAV-S@èp8Z`LփҟP!e9vkBB^FRXrhુjF{5GOqKaeGJ4%mu_|h)]"ү1z8HEGHk"+)-{ڭ4ps$7R#Bu$VE@I&X-Ő1i!C(acjR*ŰJ&$p|y}ԗXR3IhQز4PP#gmF{pSh/W1eirfJ^5`svj$ؔIZ{DQѫЂ?L)UЂ?\JZ"U1S?+-֎􉿛67Qwz ZuDOn SM)J`IheÎ+4Br¹akSLoM Hā>jd_>Ն&9FI",Ϭɍʨ&%k*Cpc!d8EY&Q"$NYۥRZ:Y1T%5DKIfq[!eQi2JʣR#I>a"r-SjM9†$c p`FT4iΰe&yV@az'ֻ8mq*3M̷,1,_4SRVVOuнN?姇\0[Z?vE.;<'7Jk] NDak|b%HL/xX [M2_|2/}?sBd;hAMlͧİGmda>5LU/ #WoWTs\s]7DbuMEA7͐r<-ߤ ޤҀp)RΕ־S}nN!TwvkBB^.SDIz-'U}ҁݢHOS{ WlgnOx^׌cȵv<Sg~>K^w,=s.xv{w<[>9hYGTv,t xx-E1/]MBb\rד gAK5HJ#Ẅ́]o;b!} R/䋿[?5c.6RzH@Qf3ۋ5cg#} ň=}ccbavc $G8:=n ~Mcv~zZ@Ɣгޭsz5z ?joS8.TrWRwOﴎ)Nc̋Sl˷76qRlMd%rꞂUxqv%ߌ.LujɗSyC%"oqHbpk2ıь,C'B sm8!yW+Ae"?щRXYBE9G SdJ* W,I Lpjl(8C + 2rWJpjM~5:X!i7yBrQt(0V"5ϛ2x>&eU=ɕ3ɜLYԴByx όq ,5#d XgFpzQ(8՚0-1 ʏqs$0R۶ o$MK[aiRl;.3geӀsbsK14MYP dPp5cTx;.dy 7cB>x1i_YaŬ9=Tea?G%pz*ȏ-T&2elk?E߷墴J%W u3U'sK/zEޠA!E0b4vgښ۸_aeONU!e{S[g7˦T `3+SZ?D 13JLph42!(7A8opἩ dt7 Ut7q(9Қ"3t0c +9:>%}5z=x eV4]% bk(i^mѭ_Ƀ,ϒ`;vcU\si3HPF#*sEwy=Kd- TVXLwk% 8#+luf7뫋L.ix $.$ղꝵi/dAsťμyK4cc`99Bl"D z# IYF=B:s k2jX!PLz q3oY'N a3X[b$bKΉFCfU 8Sޠ$cA캾٢ ]<o#m@p}YqK9 Mo 9rI"wi;VM#ɦ``kթɻb-TH%F ICT͜1gEOi:~B`AbņD#ފo%a*3xkzfltGc%iݸ ~!$)=F 4!tCX fՒU1c7G')괁 Ay[(v,mE=^m44Ƥ7Ag@Eu-{Vh`)igEV*o lWݳJIiD+њ>xo ]!h10M|@'GG:$3=QːO j{+φkO"u >҉8 &np0@b>fWWKk%ch5`V(zAZ ~iY`4C{ydF*~%{b=v-+_]<2`.{Y4cșCfء IĊ!s[<'G79-V2]@=~@х0.ggZ5m9F4`B9-3<|y z wg7T bL>!͙DYB'VY\;iR>,/6" rsUImݮWoWзϣ۠Ƀ6,R0olAn+7*.(#t-b(z%V },s/XBDb!.;wϸ WAǏ}nv`TjL̍qrs2 / a^8)˴B+gH9r3S:PRLmTI@* Q m_S38ʯhX ƄXMa υ Jx|\ۑwٗ̓'WYfCPKtH4@ >d X]g#3KBm1sؘM~4I? f !}t^ZšM"e"ZK? Ixu]/|+gYfJ~ȄCI- V[k~üj Q*U x;$?t1 m#Ŷ[Ng Vd01$:>+&acmfݸ WL -|3⥲Z#Tk3Zb,a--rlЭ5wzio8_] Ul=8O#x?b˶BpL}o?7<fnRe%@9iQ{_ׇ}01o[?xp҃%0;tHn')D\i6;a\}sV F1q!|']yi|,*[_&>;3g'ɜ8*sϜN-֘(&K8PRG(Y"(Xn*n͚ˤԛL:˱h/|&G9xxU#$U bm:)MWǬ~ٳJFvP|M*Fu o-ܭw*E)< n"m6漩W-ؖ 9w/U-&k 2:t9q\t,_nds\x-ed>S9UBqP`M΁DL-s:'s٘ ]IT5P@/˵U@2.kC01;*r|) "ah"2pmJ7) r-,Oɜ5wY84:ϑ0gka$qWܮŕF)W5y>.mh ir*m+(;7%3k8 eԳJK5? hU} O2ׅG*%HT`*'T+ KkS^t Pm-3dbr샄nj_48'h:a 8q.8L2:8~ wA'|ħqו} Sa:܆ޏ_ Y&@AzPg(|kj1[+i?.ܺlPXt4gfۚk%GR9HD=ϊ9lϊd%%Z1fv|x"oi)ɤF6C wk #qٍs'+{<6wg,OgylREk,ް~e&q:|eU'  R`L|vDC]=̃! 
YTbuQ\QSc<Jc$˧ƹF='^TxD 2S4ӝ/dQ9шOwn'RFHMR\c5bX Zu=$un3<&Z"J:BZݩ# !OzdZ\PLN]2#!V@I"ҩ[U08s:u̜]N^˩ԅ8<$=.hh$ķf Q3c;bAr++W c/7_S'$?i@H'C`^SGZg16 c0τ>23+%Rȧq~V0=4eqIC ؏n@\VҦ;ΎIOjDO97&_)3#ZZ1gxpF5ˆϤUg1I7+ipI*XnǹQ89̨d~MЧWDQ6C`F7#s^uNEK HkkEFU&`Nf:ɸRf YC5_Xi0=(eow?v3yYxg]*&ౡ^%/o΋_,t^߹i+PYoL߀iȶR߄ i0y\xXuM?SRzܬf$䅋hLqBnVTS*>^2Ɂ Sͅ;z3R\oPVʦvjf͈T1GTҝ龞H3 y""SosM"^%Q FtRǨݎhH$*m-SvkBB^&=vKW&P-z ͘LPBwotxX-JFQvp3Tx1Z៙5~R >%uBM~zLOn<;T6!҉ @ԫ*;azhÐ:f7ʁC@Q)MNAU"CYl*F=&ua݆1K`Q8 "b0M8ik[֙J[CGK MTq` .KXYuȊ5<5.3UӷԢ *\P5(=0LP"B $b#424Z Ҽ]SP^)spkKS}Tِ.7B_MON!^qt[ш%B\ߔ+2FՁq~bo$[&#%ivw2QN)D52IagGt"C0YQpڶjAli JP: %<KO,-^8E;,O`tT?{]zu(T1 b)nP-D/bSys:F1ݔD *K hBB^&Tz_- GʃIv;) 0Xmkoڭ y"zLq0ydݝ?%֢[iڙ7/ی-/0-9 $iTHs%j出4>(H0"-H}KxlW0m>d2C~(vs̆;؂RL*ļ>* d=Q`[(=P]bj'R]x8L?s d4*̏ye4p*()Ca~=Z~Q1r۾(.*6+,g?o/]񜄌Άy~>Sl-,_/ 즢7nd ڑlD"K)8FzJ F͊9\W`t/J$ l<~Zf8%TnKaWz)'@AWN&J!0 p/s0~YTmXIX_^2y̲}@k4(U49enкy@j/Cz|7z|7U dt7̑F"g)EH+sεY92ˈqBz1L`5z=x"*xɕԁcZdj?c4gz\(Op?{Ƒ_!Pjd{`s#NSB?m&#J"!IPԸ^]]U]Fp|Iw/ ^D\M?ן~ :^g'ȝ޾4L>)@szF]oB/*{F]󜧐oB9 k˱ h+8sNșx SpU1J4ξ.Ԍa^x^-jQˌ$ 1TĝS b *zbR ϴI3 Ig%FFfK3MipR CTie= x* y!/t| }X2Snyi*g hrǀI21Gj5/Y9P P m-&itt8 C}qѢ*K藙"}aܛ\8:_guNH˅Ӄp35~5E>"g ߿g^$ֿy^^3Ъro'-/>\\qvU߀7ͿsaInwnxgWVÕZߨ0z(y.NEAAkDņ&NnGs{ *k%#9i@Yd!5װqբƠdQV 9skh'ɵs4_t,>t\$DP×G9s=rA<)o%{p7Ԭe @6[ղf9ڋ篅zn{sZ/1P6AyR؋̯J1wK+!A#H4cXô]r^5YV6{g'(ުѭN#,yOIDfv}_|g_O>foNO)C2B]e64 ~M#3Rf` ]7%P9sXxN5+ZEZ ؐ23@^^^:ܗ [<A\s#h tA(}+dDO5y_a\ *ӳzNy1KȬJ[) GHzdVo(:?7T0t(36:"<ߙp@PO04NE҄[,9Ԛf`B O,TfD2Ib*IMY-7ԶՇ6Y˹OxT镣a휋~G9\Q(yF՞`|WO<]cִEד_Ÿ;[dfV#L1qvblFE펯)uH9}׻Ppa7FsN#Ŭ/=BՌqw Ԫ%jHzRkXPk 4cssOu /Ch%׆Hh*iru9rl͡q29;\ylt6߳ACngusЮ 0m~-]_e ._cyFM^<\iDxc6\y{UV»B0&8YK$/.%29vӤiڭmD]\vZj>$/.E2h}c= iJt IыZiڭ Lĥz(Pīv&^Ya+ݲ=L~nhiF^`rJheRܫQAy9$~U"jX~j0HiFy(orSq ʹM9k}X}l[y }sޘ-.(w*>Ecs FVϸ8x!T|'^sŶólڼyuKMVL[9xp3A1iϹV*r;^DO-0۱-kZrck`}w318v%B$HK( 9}hN/.z4H D+F9ON Zۑa˰Q_ƿ(.xX?dǹ n;N@7>d`M|͠Oyn]ӕ!ȔӛԍƧן&ۓjg_ͱEd +XȊ}FveR+ˉr5Fw?-Fwjh_V)ohv,Zkwe͍H0eS#gp3/v(*T(^@P,MQBf~@"3 '(f"G7 6޺c:& P'wUF-K%m2V&߳_/vb],?+RȨWes]#F/[_<]YKD|a4FʰePyӥ_Oll%s'}Z*jGMM,2i֍- 5U)Cb^:l `rSeR, JKUKd$XJ9+SwWI_t9]LA<4e4(B/XWi'O„9TY!0VhR J1ƄaKX)a:wJ-AUTf$L%-RVfs/RX.Tm{Gk2 BlP ?Pik-äu28|s>$xhe&4i2IN^$7Vf%`i ծ\0JQl`Y²7w^%fkXj1EYX }2+^t@r`HzLp#kjDl7+D iu}6кLhT/W˜R.+'d$i挣R+1Z8 "f*m,YB1%ȺBN.M!/BY?A;?0aֿz# U̙KdQ֬ ān)ؿNnէːpRcːw40f$rz!fZCraR^{peK%PBDy3wTckq R߻?u K^ld]d~O"7qs}O& ʼύh˕3C|`+;EKLM{VU7mJtK9Y\W R,~|p7Ym JhՒ$i^dFa$0htP\I ? 
1Τ꿺;`;;El"?Z 8; qF7GЉ5aIf7m}/CT"'#kp NtGx>cp zxW0d}G1E d?[%pC!G[YC!n/@(h`t*u;] !{q}%>Cb|!h="W ÂwXq}{yL_j< Cƨj>p*v w4Dx9~V0@*D@C\*SЩ;S gb=+k9īG;Mx< ;.'#=f#/sWOv2tT #=`DWO1qD!Bz&i=tAKPZq Z#j6*I(S$E/N(M<(D2Fn.Q}X mDCJAWGT"u:=#b̎A Iؚ*>TaOuڔR9Q .*L\cŊr^_;3念j/G>X7x _義hP3j+gQIJN+2њWZյ p+("Ъ.Qȭ$O`9_/H~tTw]@5EzᤙŰ JDI5TJiqj qV[e~ eeSJ%k<''ni]'w]͂1"E)L+&12aT!5IUmA'^s8ebRQ"Czu}Y$VSalEUIQ pp1@Q ka`R2Mn +7z1n'_;۞j cy76D!k{`]&+oi{nT T e:Zɼbhdi:T5sOrbfYGf4,> f>6،f߾MKvݑʍoKVWci-Y>W߭rEO5co˫[7_~|1 Ep={z}@ [̖]O=mKh@9_MnfW>ET8PrCT hiA$4ښj<׎%8- W|Tl&v=MVX>ZZ=w@l kR-MNesifg0H:|ZM)R*95=:1O:Y%jN4QgDE8YTI+!rO ID~9P9gpc]^}j}`Y"R :/z<(j$;#^_L./eb as,8;3 6蓒 8y(J˂YD U֨/o 3MsiLTR"A5V!8>Pv]:DKdA; [9ٵػ<{,dw-Y!젔#_' =鯵LjQ0Oc~稼Z:/S024s9,0`O~=Y9lxrWT-ގ=7W#*m4ˑ.'6RNJK0xĐ#M|vnǓM-f/qgGNP  vP`xRxyWּ"C-ru!r-tHAJ;X^J8&?cZŤw|GXanqlcKj(+B]-n9=S]10M z&_@{۟~ D72xqfoS qq%X/2[0Fd1HԵf+ ̇|`̇U c> ]0/3/HI La VNVFeMKti~C CT F9Α?Zb+y7tBBv|X}̇ 5鼔7}̇5z̓I,H%X!d.򺚔 tt% f~Cq;g>~Cc^Ae> ]0O0sTh[Y֠.1Y~C z3k3QE) .Ak+U-ҘBސPlKi& `dYFiErQVsx;pH{pڗlF jfG/M(g[{o){Xd#g P˜dY^dT,`Hu0IK59鐫SKfjq'`JamcS728huޗoN\iDtWk>^ %0z+PNs qzwmZ#>{,bZU>Z_UlmkO.w՜9vgGie ֌]C%Fד$R%5C@"[KfI^ß|X'E_֗V57fzH״p֑BW`0+gi]Q2²IRsJ ͊K\0OSH!gKVҡ=XȐ3HAA|&Z;xXR$eumGh"g4;}8#T)}<,$pu㖣97`PuF\k6?-ʂu]$B;XCji`2PV{ @:9)tv3w9`O!H@j:"HOdw0/ˊPgp'QtlN.H;唥sAz?SKj@qL= KrY:\0A 9׍nt4'f'gbKjv)Т0w n,I׵<u Fa8CsSSP.u* +9/}%njb&|eǭsC?QOF ؜KqF̢?-oh-Ul~=.㏰~~^P^PE3TxXn kjEI-$U!n5!T"Ui&09%%)蟞Kޢ?ߎ~@Iֽ~:ۑz71_䖣b9hq~/F*yֳYs|9QgouM oj]xSij5-mnVE/rMΙsi/x5eInveJ,P )uI,".ŮV CP {,s`LpcF`q̤uyx>(IS!*R z0Ǯ0h}ƪЁҗh!щ5LVKJe),Nhˤ!!&p/\g@ kv`or&cW J'26tb.^ep.aD{Lho n 3 !BeNMQb1'L7fbX0?m^0XYsb΁FBNZ%N43L1* }34R`"gfB{+!S*u*0xXxYLeDX3xA$Lip< ؘS@ (Uk9Vk\ r-s&`(T0ڦQr&f2IB$w5fbc⸙S3cc&C&{% "[NdL+@rDnSoFץ( }0$jQ߃Y( J|.82J\z n,*yg]f2,D2@(˜ YHU.cn-`SOKda]o\+6A* 0 خ V&f2IRwyZ>p0us4mQ@1` ˨hJi*Tkv f*v\F 1~0x0l5 r%qM5LE"yL@?T)<7}9_̌uFayg~r{y -SnRwww>hga9{{rśpk񕇁^ݜC£jkz3r6_,v2O/ _(~.~ߙ*%?Np}න'DoNBBY Vn.w8n)qJdH'k)c`L"d_ZI%(ߚۉ4ah]skbUQ"$@M6XH3]ڂюg%ܣwW$"A^B}@'P^m.#M!XHנ<ɛh-(vg?xWhY"qu2t9]8^lAe͠g3+86e=p&dbYW}c [Xm[TŠfReC>WJlr ʑHC{Ox`E-3jMOQt¢6(^ojH (Cpkcxa H ҷg?M)}(>R9ޑN׊xw?sJ Z_u標_\_3nmPP;u IRnڦ=hN0$#q+{a ʏaU(Uه/EUkEMH9oMI.8qSH$a!w5T#k}reؘݺ3;-G&,6. ] ܞW}3Ve?,$-= ]5S:2Bnk~ɝHT+U6}z^&9:)[XDeQ͖-r0-,I,"*9cёH¢ʞg,.{Pd;*A[U9jVn]œ=dn]5z.\wJvFgW%u[y~KM^}{ Zg_P4~+[Y\۰0BUf#9"iZǾwۛ"ı_mRx8 I+֏~{Xbcg44?c|kB~6ο8z_vQn=a짨a1ʴnBpX#0"JBẼ#*)`B;VoŮ-|1h,bV.~;;9)#V[9e̴ği"8-HhoQvlǛy/j 0j//p~pxrcdQŭh$=1p"cB{X_߭wh|6 w,2%_A1#@|2v~rd|sb'#x}'Aσ> ݮG<(Q#s0R`Xsg4LfXgB)5c#ˉE~@χ!_gh$tm#s׃e]F; X6&]\f,˶md[1߶$~(-]2D2#,99B cq{J(DXsA*) $zEb}9r/7f;]/sWrhdCчC\r|X`^M$~_4W7C!<_#3ӛ!|Xر/?^?L|x:_z )\mlB yX(}B( K0 ˻蛮pvS5oGX4^Sc^5R/өi g6QOXK"hKMfVC lsL'5hJ0J ?HauNH :#jK iBHy==*HPc iTI鵔x?!_/~M*ƙR ܇qG |2pB<yG:&8p^bR݃kO5Z(&:2s`g9&U}uF\m4t>z4p*6+ Tu;D}#1 Xa}12Ddh [քֹ #Br)U_//R'9uJ o+3cns4A,o3 (N0E$7BgB:mIУMД~;K|~Slonv:b,!#WjOr>[^f6yEQ,Q8:=KʕlN~%X >;ܩN}.wrC,Ql gmЍP\1ee.L[0RJR/ mIv~>-D4a)LFN9*G ԰ u_Xfz/EM jx񜔘A0/j,S,SgH*NJ1/V|Ҁ-Mh|S|4̇ohf* , BFR!1jD7So.չ)f[ */B WS^O*8mJM+*wZ-(4 y#m8 qB^P9hTbe;u$N|oV`}^wR:&e 裔yHAC> Jms~. GLk D9L -2& Aai˟OQ1/ktwy 'Bo_\lq37k~=UyGW3F_=bY_꥚`pqP}\hkcQ#?@W>P'?\pe%V! 
j,?+^ 7Ѱq}OՈLGHj#>i>wA*Ǫ2F5hS%aŪUDJ*բvA$pɸ"LV<6UMǯ䈁'.J"-EkDzNdkVkjf"9HLFLaisDZꥊ1ޕ)9 JL5Ĩ\Pq$9!yF162=Cea* [|N?s.}R_ۧ!Ղ`\~:`ȦgԪm E f@ >w@s]y!+Qo^Ҟ-f3lp`} lOiڂgn~> n>޺|Ŏ{6!tQW=ݑ>tyyvj㿌V?יR8{A5P,~wV;7Q V\uL u=?~S6.֭P Ƞ\aG}ys_g,/OI(&e28ݚ.gI}?~tg(hD/, fxDئc{o j+:?-s`t&GKhk$)SjkTk8S # C acGß"v+B?o1Z&3qi.Kks ӇHä7m{c5E0Et+wJ; JG:i}X8[;YXW0 j2A B=\HҋbG&Sv\B'!h"2A'LvS7S&5cD /j>1(c!wbR6QLn{$փ:z aa١ٽ^9֭ROl)t*<9x^c)7{*6Fc{\yTN\rU4ȷqZͩ0iPfՇAգLsVF 2⋈_EEOQ?.h)zHY1Vg [AdoYm^4H]3p!+W]C]K{gyZFAZLJˣcȐSyhP3*Nm%ɢEOe6SH5b鉢t@+/GQ]vh+GQgcYlupp&t *ԤfMW;گS 4F!4Rmh յVUmq2}6Z?VYQ`}1]AM6;zzwVqMѾ YڲF Rҙ((Ժ~SpZء5Քvځ5KIϠնkA`{ =3JlQ\g]]]w/RoXc['~7U%/݌ņH ln2̂ޏ=C^ڎ:wTf' 7dKM#ŭ$[ ɑ\[ E wbZ0X1BV[ \pkXp|[I %Z!p}ޮ AcC7p4ImN 0, L;Z٧'« (6pAiI6 k},{ V,037緹fs<֍G{le"7wvկk>d>% קc?[A5uAF#lb绨iK d}/+g%J~|]Uk$䕋hLœ&ɣx?gcRNe]ħD)TO)AE*6$䕋hLidŗTֵ mk (9P' 2%ѹPBnV +=WBjTb? ֔.mv0eII&|ѝbWF_ JX[iEؠswh 34by4P335c-0T+$ 6Dyc4 O1M> !\Dd&b#:c4nxrޛvK?,ڐW.I2%űmLη4E@5SdN6ŕxzЃZ؜M 'apry-]^ |p 4 ;~yэ%ziڝtFeP\f~{Q~y~p'j,/oYf=*7<+i˓cސ>y Go;&^fA?MVC27Y^n+I~YG ؈Kd-!Fg3ZsaOw-'%MYX,ng =Do 2kwHŹwV.\CZf =ai)7Bd]nI> e^T__7Sڂ=):q h*4;#@*RO}E)P<[ ֫BcI_X 5OkrњO!MCHXeUCp("ť>kPy8ǁj` tMU@B#!+Ժ:EjG c79\4uR)jY:Y6ƃS CA35n+ <M |<6@-makCv1ay=oIoy׷x[8'Ϗ:\xFs DJ1qw={TQq,m Gs봗^*ŇM\[$u&9|&\!#&;7+!Р]p8<ϕM,n/@)P`XsI+]GB[nk'c:ݧQPu/nkuq~]B;5 ˡ_KrV{~ùcǾ3p \SP&J(*ylLheՖ16xǶu%0Gi>養=E,fΨBIw<I Ib0b:r P5 aPwϦAkѢȞY I9;`Yesܞ"WLF&c*Rjhm)K9=Z7nTOﳷ}2}be*0\TsRD=>:qj'8.Ȼ><OBy&>Al1Ok=`m`o?%D< [x?tl#x3P9lڋ|;Gw\ [}fpuaI-)O8TauXvMhp9P^y3/EpBT18(Py-O̧`>i4Os7Z+뼍ſtAY5p'QsEym=d7,=tw_%emL,tGv;hcd#,$ZI;Ńbr&+Ed&Z@8uP5`0)>e#Xk KRx'tsNj*0.uߞwe􀓃FOpgkB %FEBqpVI.pFsm'wl[WV}p)A mIXJJ֍ jH UE21D њZ*v9@Fng%Pւ-![rʤYKn/pRŔ(:z>YPԿ긥cqEv+sAX Etw2[ݭ'S Bt ;oS߇p-OMm+UZ Ui@Ձ2_nE}5v~ʠD)c"q碽D'`=صy8(ACt̐yb}dP[,@!#nn9 FzHMkO%(kV!dB |^e"+a0|A(1PRv ؚ}2;Ͱ^&c'=-v^s8DPB] !OdU!@zknBñB P{:Rf컇*bNPo04@2Ila"1Q[<ைzr̂oMҤuG2;oorI^=/]?^O++fcc,9]q-%k!Q {W# ݕaÜOj7H^B|Ňwy{177!ZM; ,nEt#ݲB[ KtˢTw& ظP͒RMӛ:1X uRoG{gZ~8‰|s[߇a|QuL)x~&OsNmfWhG |!(3=oghS|}T R1"Dˀ  >"Ԣ|x E|YR_1ejEPܤ7@Q" "~qOl$ML檟L⯔]\IË;NgLXQd^' ҄ЃPxn(;jM LNɯi az0\9= brۖIwn-RN rXWCGRT!Sdu{u9s Te:0FLLK@!>}u@nbWd\ks.zFl:2&K^B"brG9PF#FEbRtj&Zt^X- Etw豪EQ*4cUΣTrբɍZtf{Ǔ@ 4䟬/-ܽ("t"ew,=10=:xrcM4b&>Hy񀛠t],c wE-) θJWZ Qyl#i UNs4Lycv;=vp~^!,L*R^\ A]YB(6q5J)}}J3-ERF)4Bm)sr$R8'pgk(C!3ij":Pɚ&3?aD0cb&c@J iTYF˜1Fo+|yjeRxJ5ZF!38VTiB5oٻ@q;eG; O[a!^ o''?:~}[*RfMePqJf{77;&W+!R{û'Tp]rʀ2N ngB[Ƙ2龃~wӭHߴ\eo"}Q SQyaT6PLvjΉSdgrD쮑s"OdQC!(Aw)9-鶵rÃ"GH5Iok>=87WtkG1Tty `=<[RrO"ͷMp\ʳEd|A")-=@0^CgAdqHsQxfin8zM_gqjMϖӌ(4(Mb,+jͽ/I:jQeߓOtǾ Vۊ% i\5Ii(QOr|:PIƘ B7PC !妱`!ľ$&z- Izsm[b`Ȟ(_-==_( DhW7(#.pgNk"u 4Tx5Rup5U8Cj&ٙAD{\[L+]%2 @:}[dOT̘&j-i4 ŞsJibY؍H^ZEц\,̥@tlȉ0& fc:Je<ƔBLa(؄ 9Yg w-4hZZ=ﶛ6@Gu%̩rt/Ăs&?V5bcCcRްcCӊ<671yasAA) ><ެKerˣ^NdK-SD7(4R`J}) 1 wN[%D[|m3 xBXm 8jk. 
Hy.T'=bj|SX xͅFk A5Qh#3:~h8uEkHKv uK ؗlH}B_-ْ,HuEg IنH﫮K_*H "p"J X|jQGB,ć ;PۮMDuµ\D ]LGܷKEV-s ַmu~W t-?$|Ò)h8SsjKRW%3ClB؊j̛S8]G1JR)d-hAtE l\=Ѻs"\}|Gl:7ܭ.^|?6Zk3ouAsa2''I8ֹ m#+7"&jpK R&T%XXg$0MKbl R[ajG>'d?(>g~}͒V?WR"V?]?e_Ԉgkq[kgp^ l}Bއ/_Ս>Of jW-ГO_T\ :GK]|uAgǷ/k|}34&HoՂp/_n̕}+9udǤ>xE hn;.ZX-YtϾNjw{PݳT9x,xoR~8+jg4gџV=^K,_rV4* 0W3pNS '>35Ci;>*Oϻ2GKDԒ)޷~~SjdRNp0f=C%I;I[twE ОInC`vWmID.ރOI`(K>AjC0(u8Go6`.2x=WacɻZ?XG6WW)Bhx ٿAC˖{xЦF6N/ Nki<Bq~ Sf5Ko/o-98d'!X=n|QgmY1qpۢČ aWm>g#rwHO}{wfap9s#8X,O>q_*|!+V,WѬXl q9D;!#k,?./#{sFܑ¹AM7!BkĐSj!%f܉\+-u 6,K:W-;UG󱕌rjFSGrͷg0*C95!DW[.8E,zlc1E8PY0hqC"JNR b\g&@敵H16D̄Ο0tHȞ|"!Y"吲~ JidL9]  |"OQm_.o&jSf ,(AD(vZ밽:ERq;o<0^mV[,0-ѭ=E+Tm8ny)Dj-}ځ)9w!zH;ԜamԨf?ꯊs!fӷBkIݻЩ#(\5LjGnI0uSN9z:2:tϮx()FH#'Й*cNNa $ДLz̋C&$2P&zRlz̀Ϋa6'lh0CAm 쪦 %|C=rpe.Fwza0?x]uu.L `>|▗2x>nQYSoS^8ؘ:^=|x^f̂-p}c2Kvߥ@WlYsՑk~kc!w{߯2 ʬrCU}2$1{H@cYiF9 wFȫ@̣-X}䏟Ksfc6K~{lO,OǛfuxbĤZ[dgXŴ+ήT5qE:"T]Z'7(y0ʀ^~wɊSK+a91Jc|M;2t`Gա=u2&84?o}:90t8z26()_LH݆ Վ1NdII҆EkPOcxiiv<ӬB|9L:ͣbޙhk:!>Ţ<JB悐Bgok{v"P{WoglX3jK/1Ag/Ơ BȖFs PvZ~]ds$)iԼ-ISi|LrMQh19Uw~p͝&sŭ@,: 05$*N,ճ xX=5YE8DO"K!;?ZRTeJ]"p}Umcl숝)DŵMǽc~یnԹ8+36MB9b3Pr<5ND 5 e`Ɯ:#hUORUZS%B5)o2 @ 9GN `l4Y#c ZȪop \fjfF#)2x#'`3 C έP.'jK~jcdWd-2]pC ]N`$r܎.'WesEz\x㡯ܨ9;UO&^ݨ #8wOLyKx8<0mTv>7UqROVMSƐە˵^"gRO8|P0u7Y-:VYRs |Y(RmW-Mέ(CaG&/Wjdg_}|qc<65-NމV!b*5Kob6cc 0 ?2ԝYyseHjNO9G:utLP]nYMW% :qx3/!}}ޓiA.䵧BTMi퉟q4#}R\fh+eU{Q$Ѐ0[?3L>r.&y:&92`bsHAѓAQ@o <W8~@l!4>tnzkb^⸼*ji*J7X#%Kyѳ=KyѳYUPD71&Ra# C+ _E;Ь(s>/`|B:ɐF(Ibg1TL"9֠Q1(To3 %)1'-A}_mq$q.ނadU49q%Sɹ4u?m^_Gӣ F[kM(P7fu΃ѣO^.< 3\381TS҃6QIɣjs,(@I˰-xmO-?YgʇqjP$s`"?-k0 3TxB*;UHų&XةC_x8J,l!,a]^EJdaGŁ=o 692beJ&IԁE9R[&>XKfGo?OX3oo몤U^O^4AV^7Awco4^y o/b|W_o_~vwmmJ'U {SݤS. .eJER-m )rD" /H̠u4]{?~rԃEeCg!^Ǭ7@߁[0%WXFF+gVeWHpag7׮dV 巟޼>I )`ѢT4^I"i$6gդ<_u}8Oyﳌ x˃g&(-, V* ]qJCǐ VK '/ jaU8%SôQ079ڀXJ(ڡH3qZ"o\UogYb?e^|)SO`*U[{P-Oj-ɞ$j@}tOfD3Dm#OfHyAT-^ePJtO&`"zkm83!YJ0o[c#R o漖,)|}MQt%s@=Ɨ@G󏓛,> 똛gzϪcg/64Xʱaf旑aǟCQ%|,COh>jGV|wRrVe81Uc68S,'B:cw?Bfi*:? .qYVVxXG:,RްgoX=nMқ#&ԃxPqIտc3=wRh{ӌxS%^xE<5]iom;H|b^DRԜH*%ĿWۊnz1Fi~˦?|mzg哻+9noglt}/~ pP/W c4RRRg ב&~xϗ|6ͅY.\/ݤᓻ]U=ST=#C2 |v^N73sʼnlG+mbHVpU;{aŠT ` ünspfxPpT_\:U(õl/BhŠ`e~^vW| /7?Yo},~f~h9خYE+COt,E>VS>UpP5~\qnH= K=>zN񚎘5~6m].zߍՌuvٲOZݠb](<uȓB.ega~afpY(/_y2LQ-C'w{ k [zu[~4P)`OtMT{M\,So~o EʹߍG?Mݮz~L_?NZ氇 ={RH{wIi|rQ\ӆ ˯ݭ{_xkTfG'stٺ)8ʤ"#۽Lucrr#V1U4Vos@n q-Nu˝_t4@TY㍏ǂ%|>oJ&p68>vBM%"v ]u4tZ=Ύ# QcL9%[8sfԳ_(* BJYRI q:+BV!)ʅ7sopVZk.6M|G9T+FjPdpi㬉S˙I(1+ g 20iKh 7^b]Pd5 TkZ"1@:q#A"jY̅Czo@#h6m,it{f}ܢZ&R+O1-[9hͱ7Jt\`q| ^%^d ֎rF7 gLFd{Ntr[? Ȃ/ KhNp;2&e5M %_/fb(ɑ)rYA55†ڹ؊*r_{% ٹh?WH? KVu>0ΜʗV3qL(Ir*:AL kbd"w'\xȣI&96ߚo٣;~pJX7#!ZSo$kbRuY[dZ9z]ZrތD/rLWl+6n)]۫0*&LqY\V?SUP~O^EL-I\D)1>>)˂)OӚ? nmm7I@t!Wb-19rz Kjk$XՊ1S^S$z?ֵ-*I"q9IyҚ6Vbٛ9}bI{8aaªU5CAփ{W:'F`q?ϒ=eJ]Ʃǒ='}jqǺ= W ٺru"8`qC&4C H#6Mzֺŷ^3:zjR[ש${C J) $]A3 QJKyӞt־D{2XIYŃtii@:K5KRpHeҳJ, tJI ) ԚЎp 852hyԁ@5% ifR$KjfҘ R/b[@JmuoC\PHi- 1ڪD|fo*VDJìCĦdkZJdTԸo |'}5J=Դ8)A D[兲LKL)=͜tRR:9AyɄk%LҹRô|t>}NtY.!5ZmK0F*AAT 1||vbjHxZQ#JEmд$e-<0/HPW=Z )I`;X*, 4'>)ƃj@/a%;=ʡ{/~tlǰp ӭP&qQeOR#%0|lnP  jbX` nG^#!cTB v#uy:Fe%\n";P<+BqؘQ(f}Ъ ,uFSa o( qiFqk1ҁ /Y&a^ d(-L yngl4z5vvSްgc4^)U'?f/ VP) +N\v_X1(+.ߠ&Vo󄂿\G_R$Xx' ht+dR\nCi:^缝/F.X߃$,h~LhaoFɫeJ[b}_G7>'͎dg<Dbho<\8y4'T?id`Wvi6Xa/gO3Il.߲0T`bL O h "뵣[G4z\jX6aH4gO2'TKinlEu掣ra̙Tv*PQ\~u%aw0/Z. } T)=L T۴Zڂ?c<1\xbwHO[h]S76dS^9?5y3D6,޾$x&+jINܵqޤu*Δz9S135Kѫ}|K.K.Fd'3ML0@"3!qNBy8"PI__}nPd9Eot5v5ll֫{i+UʽƩd-G%]霣Od.[8v88K2H_65'M_7ل[Znԗm­&ڀqWעYΆ(:@chI"bCp0Bh9f$Cs7Ц?<0~;< JRs5D"c֎14!b-s4C};FR?nT0;!j]vr{ Fi=v$6Mql3Tk􂭔jSMUM}}{^And~=;?K->93E4WՅd>Op;YljϪ1qV?>_Їh>D3!u3|C[0an*t;5問 *(= +? 
LcHz؏eIyþ =MQ< γyCᇐ`!Y|-@SnWLymZLl8$݉)-DAW(\W_l>Nr!ztb"$eSVAѹ.3a\a\R}qU­pׄ=q'ۍHW0Egf)`#-X,ր<%jH >QdwYUEMqg\a{wg3J2 WY$DDjx 5ۧg~0jQGǟ-R?,qq/s  ͼ{gխc, PO~s `^I+|9gk\➋䓀&c6&2p4&1!Z",%@O*=:->ljhDf(hQ/EDQ"fT! >.oi m95 ;"Ϟ/9w@DVTs2o ^m DzpSrT"X1I8 7@mfUƳ} Q^$mWQ`X  :[Pun!4:2L^-Յ1l)B =g# zzTځڨuo-\L4ˇY.ݦq~jAvC,6~,m_]\*< I>gyY|+ԗ_˅ #F8ם}+I׭lE$ޤ+/7EE)[nP 0=OKK1fb&18$%AȄӀy-BH8AYBf HzHJDp%`E)FH 6$F#|{q"2ԻE?\ٙ}+7?͸FG)[(R]-s.OGvQ^k՗P/{_aN\s|Mt]=P(9);Rbӡą'>GR ΜƇZC2o˱n[=gǭrN9aA{3u>*h4>U:09BxP7 7@MG(!pW|WX.$v;gQ%憟E/$=eTU1R S.A@ZVnnUwZ5o84Bkz=|&AN|5o;(AC׼y{Jd[|ԡcuNe 4XCg[2!M a>tj%Q‘D2JŀsP)- \U,I#l #Mͅl<`A KF0Fgfc`9D,wV z:AMFk %Y PH˜rTIPKUĞ+O:@J-dK:ݏhݩW-\=Atwɜ6367tȞ׼̗maΣQӜQEqG)b\^}Uڐ^|- m@/2":l̞3}hmhI8 ɉk6A퍈6Gxjt{1ZC'*B+& G]y {*_`m A*c?pWF3 9Qu>]._vE Y"шDE"3)OtNeO[lN1#9U?P`&]0 ˎhrM[j I9]N0Jkzp[c$G#;MQۧVEAkmڂ1QSȖ: n-ɘӌ_iwH+_y/p;vou?)Z:pĘ>x2$oMD)B# ǖY5j۶΃肷CJ.+LHὑo_zӁjb9YMjJ#E!xŌ,D&s!C!GbZhYw]kJ݁.oռWު{|5|U**֮ݽ+$(Ef4k[FN?oL08B}LpV*{efC]1#W;PA5_1AxbzvzȽAq hL]PcЂ\ukA~vh;g`]dK#􁬓w/_ 0͡o_o%|lӫG\_Od\{$Dؕ|K!w!7u+h{#]^L7,37.C)3XUHG FI%T8{ӛX6cI w"ʢcⳇY X+Ѣ6/P2zZӞ;Mlma%@Fj ez&bZO)-Ŕ,TE֬q1ði)"fueۏh$3 Q5o c~đP";첷d2ʐ2DJ50lզ<#2.1|VܘUI]cZ$.IOdUFb.CgȆFh; Dzd/mօ0:;?PrnٷQ何4(2\LJbbAOK܄qk,_;a(,Μ ZC9NЪIέ\ZO)eG\B$zK1Awe&gzB]=M\zG4^p$#DŽ*͖qo[.;})|>?=9 ;H_A:<;z6@΁g{폇B tу 9%:S"[)/Q2oM%eML$#HљȨ?kx~~!tM$9<.JFN*9DI!D%4 뙤vȊ/L$="Qqo|tsv_uL5.OQqZkɄ.Hq ӲCsdC4`Uw;{o|@#-DN&2&oSNN<|ڑ/7Q=&)AR%1]KLG2b}i-9AǎsIbƭEh5tjC2d/+1KՋؼ3ε,; U,:u o0UOYl4x[LN/> 揝O)/MG_u_u?jzYNZ$g&UXC2Z2Z()RV&1 4IO>~0HyF`8yU.lv*mm'WI$wW=okmH /{{H~,pdK_#)kIq6~$-5%Rf G tUÓzuc9XǭD#FrjL'sщ]eٍ-oR1a485?>tF 92ƈE.Dbgy&jwكA*t9mu%~Ky=߮ұWlW۾=0l1}fplthi.%9@o}%RvvPߛp(wHo>Ռc)3 sIA]{YX&ޢChU| Ո@--,&n}q׿/e 9ћh_Ջ!= a٘ɣ#S+ZP4J0ӱ}#eUB$(nmPھ#*~/Y6{0㪹`"zҐ'q66uD0`Z3[r  ) ab ;HkRvjiV&{^26(GL?9i5(A @xѮ:`VNEp*j] } R?CEQו'B49hκ#X+euXNwt{a!K[u7Ȫ֩G e |hXsg |j&קE*N/'R6YƐdٯhټx r׎t_ZT$I;Z''y-YT0CR{ڻli'PΖ׍|*ZSVJNmpWiz R«^GsbF͖1> sޑ I1#~gIyc{|GR+hv*v*_M.>+ - %;`*Kkwt- Sc 3)&y]\>n{4:a'k0i4ӖxjAxΏJ)"LEnDuUh*IG|襀I8|w5RH`uzbheI,hy~D+lvb 7>=Y+?gWsdW]-dD,n\QjWÃY~F-K[Iډ&`kZy*X哳ޖO^T"Ō\r <tifB}eົ ʙbxc,b1DK#ʰd8CSh@00ϠGoDxGe,(rbiFRV ƵA LukiI[q\ZǒEnÉSͩ${^l2@X>w D{KiSޫF4=X1c`hD(#te(`E{*:wwzx[tOi IwYA!^E1y>?Wyx*p\f2U *4dz{]>=LJ6T]>=۳c_4tV+ g W gpT9@Wtp!^EEw07 rv^ɜ.gac;h9ogS)4\y$eN̊U1s%4Pov^L0Q~b 'y29ro7ZS|ܧT&7saz8 GJhUvcZ"E hH^uaꕤoo|ªK9z8ABrݧB)pN}ibORW5 poZpg—ԮO!>ް5e/Wظ;j DUݾGF_QU@R>5'ٿUJjE:Dz}a V i4j ~W^dvu |I>f*@8FH 69!*`0qn17lQ0]g'(VZh8.qBU6H1PN缡 5cNE=|:`^#ĺF)lq# 3ml`Q2xȉyCcDK"(mam6"xgyC'3ٵ4z7t ]0O ih)0WZFǹ1N3 {NL>|i-V!-m?'SgaUngo-Y&-LVz-%kG3U؀0D:{vvTP! C1#Nz,)zST!|OcN%!%gȸV9]bk(id|٤ gFkEf#.4 WѽuJh$:٧OsVfx#+E"WJ\q7+l%4I~l;Q|zu,o봺Qp%](+@ּ~JKgBIBiZ(*PtVxH  x&IXnJʣT|KlQ75+?.D oJ}i鲹nK Ӌy m7ѓv'W@LjF_M|`/dfӫp5?JP V ,Ёq*c1D,oDyUClmujYY5h`ݮ`yw%|o]W}Sl1ov^2yiup% ~pqzV78]D>aa.X8IoR(g0hJJjV35M;b$1566X(?)[_'uLtOx\9>Nm{M?GmBT7_/>xdz}_zk@:l_KG%D D'jo] cEzP]y!"?i)҂]fe^[NKD5Y؛iiǘ^@, 簚 gl[X 5Rրcn ZM6tPu?u +DϦ,@Rٗ/uKʞX lH8]x  ͓z݀ $wvt c/ֶ55OEvrTP N'kE{j>Pk7ѵ7j1̍xtyFn6pt{+Z=Vp idT cf caAHgs .NyCfNTB">4ȁq_<:GU m~2/[2hBx4/x=DϼP FebA´CZfD''8n1h.??6驽*9Pn SR,A<%Jk hebw8/)41w])q_v/[q?Uq$>l'Cj$1ܵ79٪{o_+4YLE܅66 ?iߟWǫGգգ]7- x[%ܪ`yK!5J3";$9J•3n+ǫgqEqvWUGR؃~Q8? -f)(W\Z$ HA)j%ie<\ T)NI+=`}DL򋱗aNQLh-l@3_Mfon_G{{t.;NF%Hͷ@"qXTo>|׶0[n D3Ѓ>$zm' z\ynċRl$DH0T'/3hU:{es߯Z/0!7Yw{3ޜ-~?/n5rcgN>Ź=Y }-[V3)M{׏m09ozs[lhn;瑩*ߤRP Vy0ݰӆ!tBR&a7 U")#Bk`zBU <i)ΥDpB3fqoGyU"T (#%k:Š$7# k Tf %~sn"(p{>rOX(C6m.wh&|c!w׼^ϟ%&>ٻqdWTz3;C0'LML%L'S*cmdG\vӠdHz5E@ht7hg\ߜ ""B8*w kXxNJ:S?wfe2,3f=A{79"?bE䈀"Ҝh2xH_zcKAfp@8:OJe*/>)*ol:Y8oz;v{c;v{^oћ& $) Vbdae"n : s4,5+כ(ț`05:Ir{1 }z (E9.9vQq1Yof2N%3т(e=8$EٛF&1AIoQxv'S ,ek=l@dZ)lKb2Z$",tGdrsA{wk1I1n]. 
nF+ªJiF%0[H 2tcK{"8D4ֺkzq/itU0\xנP^ Ą()?w Eǒj6@:@`pڵ nQ`pìTL NF1IGѕ"J$ XRvӾ QO#޶e3|/kl}ߖW];".W{a5F@WlѝR\G7IR)YC 6aɑ>oJ'|U`SH\+wV@bO0zhqf9_LŭK&-,`vSb:x |!A9$BhI#NWĮSPTՂx&O%Fd+aW?)@'+DA!J Qpp3B0-+U'`ׁTTJz25M7ay2(ݔW n˽L]q '|@X )N|ЄFBţO8u}/R>Ba~UПFAA2ME,%5*x]G!ÀA"[; Wy5< [ i]@yݴh* QwV껚Z(̏=xm  ɧx4~N-Eᶢ6|`BS%?9;\KM:]$K0Lug~I)> QEV; V_Vo-ȹc]\݋^Zn-RƝ5XHQ_ǣA0ݹI+No7iӺs,FSn?G HIf;k& ڶP)ׯn!+G JtX#X5zhxSCMpWYͷ_vjc PDEÇ/Pޖצu,,/M-T?aݺZ^G'W~!ȩ}]G%a ׯ4)#T#R4Sϸ5ځ]}!^YKꧽx^fJ4W,íV,:wpB}f!̥>opӺsf{8*E=#u|s:fաnXkƃDcBr*$6nwڴE4x8 p"'/Y PIdIK©JbC BP$!W8dS$;xz*ڪSB'%;lxzjr@(u)t3gETGH?ٹF狋zP7mg| ު$.ܙܬDžz ;){cە;[OvJ_\*,b+X9sB"3wԹ+b^onnwq_8>D|yZ.'gנOzBP 8n]5h]YtJP a&U]Y-ya,Q!EQXZ,RsHB5xvg0›+m٧g,>hI1}ZήoIQ%+رH͆;|aTGT15j;A.g4s]Lo.O+?6I Ke/c ˜/(3ҋ4,bR( mW[,[?1cgRv=8qvǪN*8qNR$%{ ɰ 3I0wo+e擽 (xG4w :"vC eD4b;0&yzkvh\8W5G Wqq`I[{6w8^W.lU'4L鈉$bH8؛e2jc h}kn⩍coOSNzM>NzuP VR#'H"}g;_f 7)쨻ʩ kyZk]N(O?fFyj,lFY F2F}Tr}d_3 ׇ) yiDz'8y%> )u>_(Қ<aD ~:9c3?۪1H?Vn32; rGK总D_N2t3Fa\I>WQ*{ B< #P>%Rg;UukI0/eMJw=C|p:F$wF]l\U?0؅Q.:vaq1}C#JnX%)RLj#w.K1"{XYEp8xʐ28z 9Cw6\,@ڲ,ݹak^b`֠ҁ{*sNLVWIM]Km0E}[q|Ʊg}Ƣlsh+P*SYtaN02_0[&h4aVj%2Q޲Qlv~l+{!E1k&YBhib?#+x8JcZqU6.>eHCH% iO'G} *fqjH2Dx)$`{DfL(0Ś$RO҄ǒ+ܭIT[#,YLi1Ò MqƲDpAR!P7q߅' R !3JfdDSqNLSgF AQ % y2:*f;Unf}gɷjañ\m]|[EͶzmBr.a0`⥻9\ߜ ""B8*FDݟ/`.-w^SχGY~&+̷Yf ll e?bE8n SMnW'䥚HAW*B8 "LY=(0-k\9σd/fyߠ^eOS4ϲF `祯U[铈B?$[E{nk5e,5GJ2)N> ZhtzWUy35AЕA<z>lhWz^G?$9l$V8nw߶ `T$"-R1@c@ hmVWtvi>oxpeJ߳.O$&>u+ܽ8t]r>ڮ tBjQ3}ݲ'jN RTpZ@O>ط+RO3rbD`\~ߖ9|ZsM_]X벅'5jx2ʑXD@" )M@| K_-jbl={e=fքR1º"Y @bPyT)UrWTݷV*0|5û(iww{  &k*-iO\*L{$.w~pH|êP.P8NxO;fq12+?RVW]l{{c ;nٴ3Zԯo~돓2B? 6_u754 㐦ms =ko#ǑEЗ2_/_|i)^IkQs~դDU\ zzWwuU/ɺ:9XY/EKڒ S2WyOwM4S\lOVen 2gP0K2׮hG3[']{m00z=6!Bò~~Ǵ/6E8ys}r}ݖqoӧːwwOx,Lm9jx%abt}s8 =ga4{7vu͡RFe*`|ۨ^ݘA+o@r8PkTz|b)-b}Zot]W.aoF{n=oJc-hb<=Onu7ɪN M)rmUy݌DcDi^ޕ\wmdrً4Ϫ8Bk7|xު8[%"x_Qt[^yRudXbvޜnGZW tj%Em!)o]"<]|].kҪ:'`xIs|݄r$vqU>"ަԪva*g2z;PP._c8WݵsÖ@ v25ܠ ڀ\Fqn7R˾=marˤT0ZF Ŕi h}N>Sdx+w# Ƽކy-?ZSB10 aNbyb0K_!5>"6L+ymzXT(Fvө|A-:*]tY#O]\֠t/J bߵ yZ mا_2Wrg"vӝh*v(YPJb7r p&X z7^ɰM~Q$@S_ҥdX)$+, *u[B@)RG͇]4σA0=\ˈ fɱe٨aWLAF8_x9 7m3̊2»Z(4b+kU? 6^/(C*\cؘ5> (/XMʞlm. N>jzF@RNi~#+L \cJ4J@t96=sү[r ת  u=gC@=Q^|x"Q*$u nTQ߲#n[Qͩ!Τ|#]P1x]RE׏:ٖu KGvGשޏZ5/B5\=jXc'crV-Gk검nLGk,O>ZY8 ˜CxH_]n{՗bafE֙F'K+0i{_N'pgB:& 8RXF3RkYy^]l^m3Kz8P!ۦ ϪZx+۠RQՖNQanϪ`X>1,aaDLAAATyNpvy<߉̎5èa&Lη,РLQ("Ftc_Vǻ~ssЮW1|5: !G7?*7M#b=Ļ;]UY& gGP i>=5@l0ZEָZ).g_zT9?x{X91#D$ΤX$T8W 0KhP7U,,$N<bkx[WgZ5.AjT*ht 陣2*E9ɳY]"I. 
B cF7 2<3RZnT5CWSc A˜]&GPF jلuE7?G)}t2Ug\1vX"״6w/vR[3g72 =NL э1Fw9O.Rky8zٚj7mՍ` wVGE9#rp>rx"z@*(k@$IGdٓBVZ(Dp7rq7ryX3id rENh mC"P%=om#Ek3jCuH0Ǚ!/wDW؉FWdr7x9X?w<إy޾9U '<HZ>H/*j*Z Q?/n7,Mu%KVno,=#I,gt2rnmO ]ޜ%t:+x?^pA/iɖ|9<|wxkω>׳|<{jrs*)hB)AcAAc0 W?=4}u??ybӕ]@qiTo!} 52ss)ʔ(,g$Vfi:Ƥ4qԶ4^]jA0y ( B)Qim$E҉yȔ)!~0gPWA-ҫAMj9 IJ8r0i ԘɁ$ # G@MWu @sVrxC#2S؃9HD5IM_A}ݑP"AԚ14لd "gyԆ&Ʉ+"5,[Άƅ;Љ VT!dU8 +'ldEDpY$ɟ)ɦ99q Z4bR)kj)vBwr$*)4KHzFrYH ; TJ޻dO*IUx~\9nl:uHvLlCkwl|>fMMl2I,XaERܒ4a♐Yŀ:YN|k&dHl=(lݻMθDylv^>fM*,:O:\r(tLXJ H Fge6􀌤>=y-̣MWsO 'cJX"✳DHt(=@^‘93`6i $jG?[ "gq,0O{,Zh|[ۇO[C|sopyU1aƛv?2wߜ=e=)˛ c_Ͽڟyfw]yx}M;``V>zGRP-w>kP p)rò,~*5i5ҘIQhh 7 @m) ;DSOa"OGIc)d<6*וyA2$hA6)KNG/H{'=O~ड7b/ЄZeR fQeo3ɈA,Gr#w|az휳B+CQSL";Ǟ|A ضr|Bf@`cƮAMgC4lWKe^^zxpxIIA2]~,pÁ~]"CFpM]V.A# 4!AC1F[qRFJ"E#N(Llܒ'ӛRe4|Or3]JРFfrěXa?QStQ(dz~DcנHŒ`wR)@5GHἴDcǠVrm1//htI-@&7j6{LI%}&3ʖjxs) +mE{H (O:p1g.LV+Gh(Uf2(#"PJ'enw.,2 ˀ1Q.s\P"{tfI771ZA2뤏K2Qf51 PK%ۭ@ |ӓ7d!"?#1g9yimX06 jTv76P3^hʪS1:QS1( UωOs,R +MUpmKGό"|83!Q$D;UrP0<8.e2Jue&^eWPg]pew},;U BtA9,hZEuκP?^_Aͺ2[ЌA1kVc_JT uĩzc8f]W2[Sub9vU;UXd]-Z#wN;"YZ+ȃ~³}{uw٫ =a廏wv1),ӭ1^ϰw_9}`|ϗ׏t`O'z۽Y)lrvI~Quj/vQJ5ɵ$*n忟 E)aÙeݍF7a-$ A$I-ѲR~R^}%ZщD]I]+[}Ǐ hhKJ=3ٕTWsȽr6%󶥥O󩦴ڛWJKrAS ht3S^ԣprrN"Ě[e|]1a̜vr`.6D;)k|%{P0޲jT<=<O^QEO &8x@V2✑۴AUfz [W䃢KՈ~f6R'Bjz-jaHgP ϚDnf.VᲢ.DB#E hsJ%{t?6D8%L to!(zu\TmѧiW”[wkh'\I`r|Sm]NnI6mVsr/GXz~-FiD>7߿`是sŲ֮;%Ayn=IA3Jhd1[1d4-C[Δz0# xJe"`7c߇HX_q>PI'A Pؕ!-D-z VŻ)cdN7CYHSU`HH=+)p:XǷA> B+{D)]ڳ=Sl(BZz5X){N WU=D5IPM*.q 9XMhGM_PoV!S)? țQA&>.M j#O!ג;$[ZjȞԣ+uuwѕ/! *V+^g9m^GYI_ʌ8έ<\8ԇ(3 pgIո J(WVtx׭[n2[ Q(L>͹U퉮**Ғ6_^-Asl~{;I@%+r!TbˆE>8"2W=HAf*4bhbib1t*Dj?񒱃pLPzp=6j70ݧhyY$eyv]Z^?ow`֮ZCFe|``z۵v5Wd1?Wn;=7DG`wrMOݧLf~S̈1/̯(B(=gsy=㹓ca}:ާ+"`x;d43iDLDr<ΌR4I)-]cW2R\fKudVvV%Lpm@DžЗUNo]#Kj˓pჵ Ei<Yq][9IG@ /l.j72Su򹝍|ժ'b"cY<~@I&!3E_R?GY+OfZʆAdG;jcCcң;Ʊ_Ǖx&闛;/F飿,ho'>wDUzw'$ N_*;퉮`iLͶ]WX'o OO}#ͱOs<C)Swz y߻b>r.p\:n81GvqS'b8 6RX%*ŗK/'xx5>本جд֭ p7#l6>>T]`lJ'heND 7tgruO8Rj,PX7zw*{;֊.#qQ`9O4qIqTi'Jf 2ā|5owm)gd'4`, +y77ދkO}0;tၹ4dnmԌ0&a)cD(չSfIaP8{ȏiZW֧T7Es}Mp\5bV+]E*wjqɭ['3f߷* uZ}x61#X4f,\hN9.[~`! mc+C&3ܵNd(KH4䅫hN9>d!VQ`Cޡ}TȔ,p8숧nBC^键էZb ] َ~@F~k -d?}b`wһM|*.F+U%57`'z#;@sF, xyGl6t{cn]b`dcQ+#USm;$-4 %w 8H"'ɿAjR(fh ր(ZF&j9WZ7 05wCQ؞g@ 1Ɂ ɕ='Wv=Y>!'ʓD2NYbdBUrRLҔ"%8J@KcLsn"E/;E,*YQhGgѲ"!n?,3#m=΂;=y0J.Nu`|ݽo>]ͧ+te7'W9,C)#ϵN)T a,r/ ḴKáphu`#dJ)tkeic;PŁfnQ̰n7(ʅHj9Yye$ט&Ԇh!u% c$ lhnӞkfcpǍjC5qA9D>X6''1VF!.y&&4@2LSJP"8ʨ`aVHh9dY6B=rD'he%$apIXa(ʵ 3-sD2!!3]%%љ3L`6Ern?آZc tX">wLU{.g|U4D$f_X7}4䅫hNA'Q|`DMBAVܧzյ !a1Ka@W&` E#$%?jFhlOH*CLqtЊG*naϿلaJk̻ÿI*2\aO/Ez$ e{QȰf[чwC#.FS5U# I %!7T:l}? /Y+ layӒf)88PY~]J$H9{}إ'rןPSxP.d3DPqJ $8&$Ay, adBN8ԃ* !%5FeYƒ&54UqL4") UIe6\j:lL8nTH3 )Uy,G1҂3)7 &6H'!\iQڢRÿ\djs{u=e A8آpRM F"8Qy݃bX]|zKr:{PB#! 
var/home/core/zuul-output/logs/kubelet.log
Jan 23 08:20:13 crc systemd[1]: Starting Kubernetes Kubelet...
Jan 23 08:20:13 crc restorecon[4710]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 23 08:20:13 crc restorecon[4710]:
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 23 08:20:13 
crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c377,c642 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 23 08:20:13 crc restorecon[4710]:
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:13 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 
08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 23 08:20:14 crc 
restorecon[4710]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 23 08:20:14 crc restorecon[4710]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Jan 23 08:20:14 crc restorecon[4710]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 23 08:20:14 crc restorecon[4710]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 23 08:20:14 crc restorecon[4710]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
[Jan 23 08:20:14 crc restorecon[4710]: identical "<path> not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13" records follow for the entry directory and its catalog.json under /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ for each of: accuknox-operator-certified, aci-containers-operator, aikit-operator, airlock-microgateway, ako-operator, alloy, anchore-engine, anzo-operator, anzograph-operator, anzounstructured-operator, appdynamics-cloud-operator, appdynamics-operator, aqua-operator-certified, cass-operator, ccm-node-agent-dcap-operator, ccm-node-agent-operator, cfm-operator, cilium, cilium-enterprise, cloud-native-postgresql, cloudbees-ci, cloudera-streams-messaging-kubernetes-operator, cloudnative-pg, cnfv-operator, cockroachdb-certified, conjur-follower-operator, coroot-operator, crunchy-postgres-operator, cte-k8s-operator, datadog-operator-certified, dell-csm-operator-certified, digitalai-deploy-operator, digitalai-release-operator, dynatrace-operator, edb-hcp-operator, eginnovations-operator, elasticsearch-eck-operator-certified, falcon-operator, federatorai-certified, fujitsu-enterprise-postgres-operator, function-mesh, harness-gitops-operator, hazelcast-platform-operator, hcp-terraform-operator, hpe-ezmeral-csi-operator, ibm-application-gateway-operator, ibm-block-csi-operator, ibm-security-verify-access-operator, ibm-security-verify-directory-operator, ibm-security-verify-operator, infoscale-dr-manager, infoscale-licensing-operator, infoscale-sds-operator, infrastructure-asset-orchestrator-certified, instana-agent-operator, intel-device-plugins-operator, intel-kubernetes-power-manager, iomesh-operator, joget-dx-operator, joget-dx8-openshift-operator, joget-dx8-operator, k8s-triliovault, keysight-ati-updates, keysight-kcos-framework, keysight-kcos-ingress, keysight-kcos-licensing, keysight-kcos-sso, keysight-keycloak-operator, keysight-load-core, keysight-loadcore-agents, keysight-nats-operator, keysight-nimbusmosaic-dusim, keysight-rest-api-browser-v1, keysight-wap-appsec, keysight-wap-core, keysight-wap-db, keysight-wap-diagnostics, keysight-wap-logging, keysight-wap-migration, keysight-wap-msg-broker, keysight-wap-notifications, keysight-wap-stats-dashboards, keysight-wap-storage, keysight-wap-test-core, keysight-wap-ui, keysight-websocket-service, kong-gateway-operator, kubearmor-operator-certified, kubecost-operator, kubemq-operator-marketplace, kubeturbo-certified, lenovo-locd-operator, marketplace-games-operator, memcached-operator-ogaye, memory-machine-operator, model-builder-for-vision-certified, mongodb-atlas-kubernetes, mongodb-enterprise, netapp-spark-operator, netscaler-adm-agent-operator, netscaler-operator, neuvector-certified-operator, nexus-repository-ha-operator-certified, nginx-ingress-operator, pcc-operator, nim-operator-certified, nxiq-operator-certified, nxrm-operator-certified, odigos-operator, open-liberty-certified, openshiftartifactoryha-operator, openshiftxray-operator, operator-certification-operator, ovms-operator, pachyderm-operator, pmem-csi-operator-os, portworx-certified, prometurbo-certified, pubsubplus-eventbroker-operator, redis-enterprise-operator-cert, runtime-component-operator-certified, runtime-fabric-operator, sanstoragecsi-operator-bundle, silicom-sts-operator, smilecdr-operator, sriov-fec, stackable-commons-operator, stackable-zookeeper-operator, t8c-certified, t8c-tsc-client-certified, tawon-operator, tigera-operator, timemachine-operator]
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: 
Under /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/, catalog/3scale-community-operator/catalog.json, then catalog/<name> and catalog/<name>/catalog.json for each of:
    ack-acm-controller, ack-acmpca-controller, ack-apigateway-controller, ack-apigatewayv2-controller, ack-applicationautoscaling-controller, ack-athena-controller, ack-cloudfront-controller, ack-cloudtrail-controller, ack-cloudwatch-controller, ack-cloudwatchlogs-controller, ack-documentdb-controller, ack-dynamodb-controller, ack-ec2-controller, ack-ecr-controller, ack-ecs-controller, ack-efs-controller, ack-eks-controller, ack-elasticache-controller, ack-elbv2-controller, ack-emrcontainers-controller, ack-eventbridge-controller, ack-iam-controller, ack-kafka-controller, ack-keyspaces-controller, ack-kinesis-controller, ack-kms-controller, ack-lambda-controller, ack-memorydb-controller, ack-mq-controller, ack-networkfirewall-controller, ack-opensearchservice-controller, ack-organizations-controller, ack-pipes-controller, ack-prometheusservice-controller, ack-rds-controller, ack-recyclebin-controller, ack-route53-controller, ack-route53resolver-controller, ack-s3-controller, ack-sagemaker-controller, ack-secretsmanager-controller, ack-ses-controller, ack-sfn-controller, ack-sns-controller, ack-sqs-controller, ack-ssm-controller, ack-wafv2-controller, aerospike-kubernetes-operator, airflow-helm-operator, alloydb-omni-operator, alvearie-imaging-ingestion, amd-gpu-operator
and catalog/analytics-operator (its catalog.json record continues below)
Continuing: catalog/analytics-operator/catalog.json, then catalog/<name> and catalog/<name>/catalog.json for each of:
    annotationlab, apicast-community-operator, apicurio-api-controller, apicurio-registry, apicurito, apimatic-kubernetes-operator, application-services-metering-operator, aqua, argocd-operator, assisted-service-operator, authorino-operator, automotive-infra, aws-efs-operator, awss3-operator-registry, azure-service-operator, beegfs-csi-driver-operator, bpfman-operator, camel-k, camel-karavan-operator, cass-operator-community, cert-manager, cert-utils-operator, cluster-aas-operator, cluster-impairment-operator, cluster-manager, cockroachdb, codeflare-operator, community-kubevirt-hyperconverged, community-trivy-operator, community-windows-machine-config-operator, customized-user-remediation, cxl-operator, dapr-kubernetes-operator, datadog-operator, datatrucker-operator, dbaas-operator, debezium-operator, dell-csm-operator, deployment-validation-operator
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to
system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 
08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 23 08:20:14 crc restorecon[4710]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to
system_u:object_r:container_file_t:s0:c682,c947 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 23 08:20:14 crc restorecon[4710]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 23 08:20:14 crc restorecon[4710]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Jan 23 08:20:14 crc kubenswrapper[4711]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 23 08:20:14 crc kubenswrapper[4711]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Jan 23 08:20:14 crc kubenswrapper[4711]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 23 08:20:14 crc kubenswrapper[4711]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
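The restorecon[4710] entries above are SELinux relabeling decisions: a file's label lives in its security.selinux extended attribute, restorecon leaves the admin-customized container_file_t contexts under /var/lib/kubelet alone, and the one actual change is the kubenswrapper binary moving from bin_t to kubelet_exec_t. A minimal Go sketch of reading that attribute directly, the same data ls -Z shows (the path queried is just an illustrative example; uses golang.org/x/sys/unix):

// Read a file's SELinux label from the security.selinux xattr.
package main

import (
	"fmt"

	"golang.org/x/sys/unix"
)

func selinuxLabel(path string) (string, error) {
	buf := make([]byte, 256)
	n, err := unix.Getxattr(path, "security.selinux", buf)
	if err != nil {
		return "", err
	}
	// The attribute value is NUL-terminated; trim the terminator.
	for n > 0 && buf[n-1] == 0 {
		n--
	}
	return string(buf[:n]), nil
}

func main() {
	label, err := selinuxLabel("/var/lib/kubelet") // illustrative path
	if err != nil {
		fmt.Println("getxattr:", err)
		return
	}
	fmt.Println(label) // e.g. system_u:object_r:container_file_t:s0
}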
Jan 23 08:20:14 crc kubenswrapper[4711]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Jan 23 08:20:14 crc kubenswrapper[4711]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.706928 4711 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711183 4711 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711213 4711 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711221 4711 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711241 4711 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711249 4711 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711258 4711 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711266 4711 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711272 4711 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711277 4711 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711284 4711 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711289 4711 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711295 4711 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711300 4711 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711306 4711 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711311 4711 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711316 4711 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711322 4711 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711327 4711 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711333 4711 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711340 4711 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration 
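The "Flag ... has been deprecated" warnings above mostly prescribe the same fix: set the parameter in the KubeletConfiguration file named by --config (/etc/kubernetes/kubelet.conf in the FLAG dump below); --minimum-container-ttl-duration is instead superseded by the eviction settings, per its own warning. A minimal sketch of the equivalent config fragment, assuming the k8s.io/kubelet/config/v1beta1 types and sigs.k8s.io/yaml; field names are from that v1beta1 API and values are copied from this log, so treat it as illustrative rather than a drop-in file:

package main

import (
	"fmt"

	v1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	kubeletconfig "k8s.io/kubelet/config/v1beta1"
	"sigs.k8s.io/yaml"
)

func main() {
	cfg := kubeletconfig.KubeletConfiguration{
		TypeMeta: metav1.TypeMeta{
			APIVersion: "kubelet.config.k8s.io/v1beta1",
			Kind:       "KubeletConfiguration",
		},
		// Replaces --container-runtime-endpoint (a config field since v1.27).
		ContainerRuntimeEndpoint: "unix:///var/run/crio/crio.sock",
		// Replaces --volume-plugin-dir.
		VolumePluginDir: "/etc/kubernetes/kubelet-plugins/volume/exec",
		// Replaces --system-reserved.
		SystemReserved: map[string]string{
			"cpu":               "200m",
			"ephemeral-storage": "350Mi",
			"memory":            "350Mi",
		},
		// Replaces --register-with-taints.
		RegisterWithTaints: []v1.Taint{{
			Key:    "node-role.kubernetes.io/master",
			Effect: v1.TaintEffectNoSchedule,
		}},
	}
	out, err := yaml.Marshal(&cfg)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out)) // YAML suitable for the kubelet's --config flag
}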
Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711346 4711 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711354 4711 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711360 4711 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711366 4711 feature_gate.go:330] unrecognized feature gate: Example Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711373 4711 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711380 4711 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711386 4711 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711392 4711 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711398 4711 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711404 4711 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711410 4711 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711415 4711 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711420 4711 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711425 4711 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711430 4711 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711436 4711 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711443 4711 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711449 4711 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711456 4711 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711461 4711 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711466 4711 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711472 4711 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711480 4711 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711487 4711 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711493 4711 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711499 4711 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711531 4711 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711537 4711 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711543 4711 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711548 4711 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711553 4711 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711558 4711 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711563 4711 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711569 4711 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711574 4711 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711579 4711 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711584 4711 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711591 4711 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711596 4711 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711603 4711 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711609 4711 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711615 4711 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711620 4711 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711626 4711 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711632 4711 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711641 4711 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
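Past the remaining feature-gate warnings, the long run of flags.go:64 "FLAG:" lines that follows is the kubelet echoing every command-line flag with its post-parse value, defaults included. Upstream produces this by walking the pflag FlagSet after parsing; a minimal sketch reproducing that output shape, with two example flags borrowed from this log:

package main

import (
	"fmt"

	"github.com/spf13/pflag"
)

func main() {
	fs := pflag.NewFlagSet("kubelet", pflag.ContinueOnError)
	fs.String("node-ip", "", "IP address of the node")
	fs.Int32("max-pods", 110, "maximum number of pods")

	_ = fs.Parse([]string{"--node-ip=192.168.126.11"})

	// Same shape as the log: FLAG: --name="value", defaults included.
	fs.VisitAll(func(f *pflag.Flag) {
		fmt.Printf("FLAG: --%s=%q\n", f.Name, f.Value)
	})
}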
Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711648 4711 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711653 4711 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711659 4711 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711664 4711 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.711670 4711 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712046 4711 flags.go:64] FLAG: --address="0.0.0.0" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712065 4711 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712076 4711 flags.go:64] FLAG: --anonymous-auth="true" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712085 4711 flags.go:64] FLAG: --application-metrics-count-limit="100" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712093 4711 flags.go:64] FLAG: --authentication-token-webhook="false" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712100 4711 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712109 4711 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712116 4711 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712124 4711 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712132 4711 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712147 4711 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712159 4711 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712168 4711 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712176 4711 flags.go:64] FLAG: --cgroup-root="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712184 4711 flags.go:64] FLAG: --cgroups-per-qos="true" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712192 4711 flags.go:64] FLAG: --client-ca-file="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712200 4711 flags.go:64] FLAG: --cloud-config="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712208 4711 flags.go:64] FLAG: --cloud-provider="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712216 4711 flags.go:64] FLAG: --cluster-dns="[]" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712229 4711 flags.go:64] FLAG: --cluster-domain="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712237 4711 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712245 4711 flags.go:64] FLAG: --config-dir="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712253 4711 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712262 4711 flags.go:64] FLAG: 
--container-log-max-files="5" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712272 4711 flags.go:64] FLAG: --container-log-max-size="10Mi" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712279 4711 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712285 4711 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712292 4711 flags.go:64] FLAG: --containerd-namespace="k8s.io" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712301 4711 flags.go:64] FLAG: --contention-profiling="false" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712309 4711 flags.go:64] FLAG: --cpu-cfs-quota="true" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712316 4711 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712324 4711 flags.go:64] FLAG: --cpu-manager-policy="none" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712331 4711 flags.go:64] FLAG: --cpu-manager-policy-options="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712339 4711 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712345 4711 flags.go:64] FLAG: --enable-controller-attach-detach="true" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712351 4711 flags.go:64] FLAG: --enable-debugging-handlers="true" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712359 4711 flags.go:64] FLAG: --enable-load-reader="false" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712365 4711 flags.go:64] FLAG: --enable-server="true" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712371 4711 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712380 4711 flags.go:64] FLAG: --event-burst="100" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712387 4711 flags.go:64] FLAG: --event-qps="50" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712393 4711 flags.go:64] FLAG: --event-storage-age-limit="default=0" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712399 4711 flags.go:64] FLAG: --event-storage-event-limit="default=0" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712405 4711 flags.go:64] FLAG: --eviction-hard="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712413 4711 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712419 4711 flags.go:64] FLAG: --eviction-minimum-reclaim="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712426 4711 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712433 4711 flags.go:64] FLAG: --eviction-soft="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712439 4711 flags.go:64] FLAG: --eviction-soft-grace-period="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712446 4711 flags.go:64] FLAG: --exit-on-lock-contention="false" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712452 4711 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712458 4711 flags.go:64] FLAG: --experimental-mounter-path="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712464 4711 flags.go:64] FLAG: --fail-cgroupv1="false" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 
08:20:14.712470 4711 flags.go:64] FLAG: --fail-swap-on="true" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712476 4711 flags.go:64] FLAG: --feature-gates="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712484 4711 flags.go:64] FLAG: --file-check-frequency="20s" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712490 4711 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712496 4711 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712502 4711 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712533 4711 flags.go:64] FLAG: --healthz-port="10248" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712541 4711 flags.go:64] FLAG: --help="false" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712547 4711 flags.go:64] FLAG: --hostname-override="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712553 4711 flags.go:64] FLAG: --housekeeping-interval="10s" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712562 4711 flags.go:64] FLAG: --http-check-frequency="20s" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712568 4711 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712575 4711 flags.go:64] FLAG: --image-credential-provider-config="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712581 4711 flags.go:64] FLAG: --image-gc-high-threshold="85" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712587 4711 flags.go:64] FLAG: --image-gc-low-threshold="80" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712593 4711 flags.go:64] FLAG: --image-service-endpoint="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712599 4711 flags.go:64] FLAG: --kernel-memcg-notification="false" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712606 4711 flags.go:64] FLAG: --kube-api-burst="100" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712612 4711 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712620 4711 flags.go:64] FLAG: --kube-api-qps="50" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712627 4711 flags.go:64] FLAG: --kube-reserved="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712633 4711 flags.go:64] FLAG: --kube-reserved-cgroup="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712639 4711 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712645 4711 flags.go:64] FLAG: --kubelet-cgroups="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712651 4711 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712658 4711 flags.go:64] FLAG: --lock-file="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712663 4711 flags.go:64] FLAG: --log-cadvisor-usage="false" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712670 4711 flags.go:64] FLAG: --log-flush-frequency="5s" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712678 4711 flags.go:64] FLAG: --log-json-info-buffer-size="0" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712690 4711 flags.go:64] FLAG: --log-json-split-stream="false" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712697 4711 flags.go:64] FLAG: 
--log-text-info-buffer-size="0" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712705 4711 flags.go:64] FLAG: --log-text-split-stream="false" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712712 4711 flags.go:64] FLAG: --logging-format="text" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712720 4711 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712728 4711 flags.go:64] FLAG: --make-iptables-util-chains="true" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712736 4711 flags.go:64] FLAG: --manifest-url="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712743 4711 flags.go:64] FLAG: --manifest-url-header="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712753 4711 flags.go:64] FLAG: --max-housekeeping-interval="15s" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712760 4711 flags.go:64] FLAG: --max-open-files="1000000" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712771 4711 flags.go:64] FLAG: --max-pods="110" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712779 4711 flags.go:64] FLAG: --maximum-dead-containers="-1" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712787 4711 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712796 4711 flags.go:64] FLAG: --memory-manager-policy="None" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712806 4711 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712814 4711 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712823 4711 flags.go:64] FLAG: --node-ip="192.168.126.11" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712831 4711 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712852 4711 flags.go:64] FLAG: --node-status-max-images="50" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712859 4711 flags.go:64] FLAG: --node-status-update-frequency="10s" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712867 4711 flags.go:64] FLAG: --oom-score-adj="-999" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712875 4711 flags.go:64] FLAG: --pod-cidr="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712882 4711 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712892 4711 flags.go:64] FLAG: --pod-manifest-path="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712899 4711 flags.go:64] FLAG: --pod-max-pids="-1" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712906 4711 flags.go:64] FLAG: --pods-per-core="0" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712914 4711 flags.go:64] FLAG: --port="10250" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712928 4711 flags.go:64] FLAG: --protect-kernel-defaults="false" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712940 4711 flags.go:64] FLAG: --provider-id="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712949 4711 flags.go:64] FLAG: --qos-reserved="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712957 4711 flags.go:64] FLAG: 
--read-only-port="10255" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712966 4711 flags.go:64] FLAG: --register-node="true" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712974 4711 flags.go:64] FLAG: --register-schedulable="true" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.712982 4711 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713005 4711 flags.go:64] FLAG: --registry-burst="10" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713014 4711 flags.go:64] FLAG: --registry-qps="5" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713022 4711 flags.go:64] FLAG: --reserved-cpus="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713029 4711 flags.go:64] FLAG: --reserved-memory="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713039 4711 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713046 4711 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713055 4711 flags.go:64] FLAG: --rotate-certificates="false" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713063 4711 flags.go:64] FLAG: --rotate-server-certificates="false" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713071 4711 flags.go:64] FLAG: --runonce="false" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713079 4711 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713087 4711 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713095 4711 flags.go:64] FLAG: --seccomp-default="false" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713104 4711 flags.go:64] FLAG: --serialize-image-pulls="true" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713112 4711 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713121 4711 flags.go:64] FLAG: --storage-driver-db="cadvisor" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713129 4711 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713137 4711 flags.go:64] FLAG: --storage-driver-password="root" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713145 4711 flags.go:64] FLAG: --storage-driver-secure="false" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713152 4711 flags.go:64] FLAG: --storage-driver-table="stats" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713160 4711 flags.go:64] FLAG: --storage-driver-user="root" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713168 4711 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713176 4711 flags.go:64] FLAG: --sync-frequency="1m0s" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713184 4711 flags.go:64] FLAG: --system-cgroups="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713192 4711 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713206 4711 flags.go:64] FLAG: --system-reserved-cgroup="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713214 4711 flags.go:64] FLAG: --tls-cert-file="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713222 4711 flags.go:64] 
FLAG: --tls-cipher-suites="[]" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713235 4711 flags.go:64] FLAG: --tls-min-version="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713243 4711 flags.go:64] FLAG: --tls-private-key-file="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713251 4711 flags.go:64] FLAG: --topology-manager-policy="none" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713260 4711 flags.go:64] FLAG: --topology-manager-policy-options="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713269 4711 flags.go:64] FLAG: --topology-manager-scope="container" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713278 4711 flags.go:64] FLAG: --v="2" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713289 4711 flags.go:64] FLAG: --version="false" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713299 4711 flags.go:64] FLAG: --vmodule="" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713310 4711 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713318 4711 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713545 4711 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713557 4711 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713563 4711 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713569 4711 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713575 4711 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713582 4711 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713588 4711 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713594 4711 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713600 4711 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713607 4711 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713614 4711 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713622 4711 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713628 4711 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713633 4711 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713639 4711 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713644 4711 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713650 4711 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 23 08:20:14 crc 
kubenswrapper[4711]: W0123 08:20:14.713658 4711 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713664 4711 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713672 4711 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713678 4711 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713684 4711 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713690 4711 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713695 4711 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713700 4711 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713706 4711 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713717 4711 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713723 4711 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713729 4711 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713734 4711 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713739 4711 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713745 4711 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713751 4711 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713757 4711 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713762 4711 feature_gate.go:330] unrecognized feature gate: Example Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713767 4711 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713773 4711 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713778 4711 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713783 4711 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713789 4711 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713794 4711 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713799 4711 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713807 4711 
feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713813 4711 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713818 4711 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713823 4711 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713829 4711 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713835 4711 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713841 4711 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713847 4711 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713853 4711 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713858 4711 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713863 4711 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713869 4711 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713876 4711 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713883 4711 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713889 4711 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713896 4711 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713906 4711 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713912 4711 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713918 4711 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713926 4711 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713933 4711 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713941 4711 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713948 4711 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713954 4711 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713960 4711 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713965 4711 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713971 4711 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713976 4711 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.713981 4711 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.713993 4711 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.737823 4711 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.737895 4711 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738079 4711 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738097 4711 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738107 4711 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738116 4711 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738125 4711 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738132 4711 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738140 4711 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738148 4711 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738193 4711 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738203 4711 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738213 4711 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 23 08:20:14 crc 
kubenswrapper[4711]: W0123 08:20:14.738223 4711 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738233 4711 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738242 4711 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738251 4711 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738260 4711 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738268 4711 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738277 4711 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738287 4711 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738298 4711 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738312 4711 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738325 4711 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738336 4711 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738347 4711 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738358 4711 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738368 4711 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738378 4711 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738389 4711 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738399 4711 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738409 4711 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738418 4711 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738428 4711 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738443 4711 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738458 4711 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738469 4711 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738477 4711 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738486 4711 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738494 4711 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738502 4711 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738539 4711 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738547 4711 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738556 4711 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738566 4711 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738574 4711 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738583 4711 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738592 4711 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738600 4711 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738612 4711 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738622 4711 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738632 4711 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738640 4711 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738648 4711 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738657 4711 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738665 4711 feature_gate.go:330] unrecognized feature gate: Example Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738673 4711 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738681 4711 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738689 4711 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738697 4711 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738705 4711 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738713 4711 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738721 4711 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738729 4711 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738736 4711 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738747 4711 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738757 4711 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738766 4711 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738775 4711 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738783 4711 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738791 4711 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738799 4711 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.738807 4711 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.738822 4711 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739061 4711 feature_gate.go:330] unrecognized feature gate: Example Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739076 4711 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739086 4711 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739096 4711 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739105 4711 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739114 4711 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739125 4711 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739138 4711 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739148 4711 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739158 4711 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739168 4711 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739178 4711 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
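The I-level feature_gate.go:386 line above is the net effect of all those warnings: once the unknown names are discarded, only the fifteen upstream gates survive, printed in Go's fmt rendering of a map[string]bool (the second pass, still running below, ends in the identical map). Recovering that summary as structured data only takes splitting the Name:value pairs; a minimal sketch:

import re

def parse_gate_map(line: str) -> dict[str, bool]:
    """Parse a 'feature gates: {map[Name:true ...]}' summary line."""
    inner = re.search(r"\{map\[(.*)\]\}", line)
    if not inner:
        return {}
    gates: dict[str, bool] = {}
    for pair in inner.group(1).split():
        name, _, value = pair.partition(":")
        gates[name] = value == "true"
    return gates

# Abbreviated example taken from the summary line above.
line = "feature gates: {map[CloudDualStackNodeIPs:true KMSv1:true NodeSwap:false]}"
assert parse_gate_map(line) == {"CloudDualStackNodeIPs": True, "KMSv1": True, "NodeSwap": False}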
Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739188 4711 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739199 4711 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739208 4711 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739217 4711 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739226 4711 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739236 4711 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739244 4711 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739252 4711 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739261 4711 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739268 4711 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739277 4711 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739285 4711 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739293 4711 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739301 4711 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739308 4711 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739316 4711 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739326 4711 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739336 4711 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739346 4711 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739355 4711 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739365 4711 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739375 4711 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739384 4711 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739394 4711 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739404 4711 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739414 4711 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 23 08:20:14 crc 
kubenswrapper[4711]: W0123 08:20:14.739425 4711 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739434 4711 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739442 4711 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739450 4711 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739457 4711 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739466 4711 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739476 4711 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739487 4711 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739498 4711 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739536 4711 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739549 4711 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739559 4711 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739570 4711 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739581 4711 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739591 4711 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739603 4711 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739613 4711 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739625 4711 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739636 4711 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739647 4711 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739659 4711 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739669 4711 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739680 4711 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739691 4711 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739701 4711 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739712 4711 
feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739721 4711 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739731 4711 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739741 4711 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739751 4711 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739761 4711 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739769 4711 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.739776 4711 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.739792 4711 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.740390 4711 server.go:940] "Client rotation is on, will bootstrap in background" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.745260 4711 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.745413 4711 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
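The client-certificate entries that follow are worth reading together: rotation is on, the existing kubeconfig is still valid, and the pair in /var/lib/kubelet/pki/kubelet-client-current.pem loads cleanly, yet the manager immediately decides to rotate and its first CSR POST fails with connection refused because api-int.crc.testing:6443 is not answering yet. That is expected behavior: client-go's certificate manager schedules rotation at a jittered point, roughly 70-90% of the way through the certificate's validity window, and the deadline logged below (2025-12-06) is already in the past at this boot (2026-01-23), so rotation fires at once and simply retries until the API server comes up. A sketch of that deadline arithmetic, with an assumed issue date since the log prints only the expiry:

from datetime import datetime, timedelta
import random

def rotation_deadline(not_before: datetime, not_after: datetime) -> datetime:
    """Pick a rotation point at a random 70-90% of the validity window,
    mirroring the jitter client-go's certificate manager applies."""
    return not_before + (not_after - not_before) * random.uniform(0.7, 0.9)

not_after = datetime(2026, 2, 24, 5, 52, 8)        # expiry from the log below
not_before = not_after - timedelta(days=365)       # assumed one-year certificate
deadline = rotation_deadline(not_before, not_after)
print(deadline < datetime(2026, 1, 23, 8, 20, 14)) # True under this assumption -> rotate now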
Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.746397 4711 server.go:997] "Starting client certificate rotation" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.746444 4711 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.746826 4711 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-06 09:26:41.749169543 +0000 UTC Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.747042 4711 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.755199 4711 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 23 08:20:14 crc kubenswrapper[4711]: E0123 08:20:14.758646 4711 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.758848 4711 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.774153 4711 log.go:25] "Validated CRI v1 runtime API" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.819633 4711 log.go:25] "Validated CRI v1 image API" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.821716 4711 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.824683 4711 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-01-23-08-16-33-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.824867 4711 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.876378 4711 manager.go:217] Machine: {Timestamp:2026-01-23 08:20:14.874066591 +0000 UTC m=+0.447023039 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654132736 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:48b4ba17-ccdd-4448-a1d6-e418ae5877df BootID:27dc1c1b-2120-44f4-9f06-7adb0d52081c Filesystems:[{Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 
Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730829824 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827068416 Type:vfs Inodes:1048576 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:1f:58:68 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:1f:58:68 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:b6:ac:88 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:9e:4e:e0 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:d6:ac:c1 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:a0:95:28 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:2a:23:f3:2f:39:d9 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:92:9f:d2:18:05:14 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654132736 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 
Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.876907 4711 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.877330 4711 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.877928 4711 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.878210 4711 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.878264 4711 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.878617 4711 topology_manager.go:138] "Creating topology manager with none policy" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.878631 4711 
container_manager_linux.go:303] "Creating device plugin manager" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.878891 4711 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.878937 4711 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.879289 4711 state_mem.go:36] "Initialized new in-memory state store" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.879528 4711 server.go:1245] "Using root directory" path="/var/lib/kubelet" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.904936 4711 kubelet.go:418] "Attempting to sync node with API server" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.905012 4711 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.905062 4711 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.905084 4711 kubelet.go:324] "Adding apiserver pod source" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.905108 4711 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.907558 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Jan 23 08:20:14 crc kubenswrapper[4711]: E0123 08:20:14.907738 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Jan 23 08:20:14 crc kubenswrapper[4711]: W0123 08:20:14.907996 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Jan 23 08:20:14 crc kubenswrapper[4711]: E0123 08:20:14.908237 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.908672 4711 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Jan 23 08:20:14 crc kubenswrapper[4711]: I0123 08:20:14.909246 4711 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
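The Container Manager NodeConfig a few entries above fixes this node's hard-eviction policy: memory.available below an absolute 100Mi, and nodefs.available, nodefs.inodesFree, imagefs.available, and imagefs.inodesFree below 10%, 5%, 15%, and 5% of their filesystem's capacity respectively, each with a zero grace period. A minimal sketch of how those mixed absolute/percentage thresholds evaluate, assuming byte (or inode-count) units and borrowing the /dev/vda4 capacity reported by the fs.go inventory above:

def breaches_hard_eviction(signal: str, available: float, capacity: float) -> bool:
    """True when a signal crosses the HardEvictionThresholds from the NodeConfig above."""
    thresholds = {
        "memory.available": 100 * 1024 * 1024,  # absolute quantity: 100Mi
        "nodefs.available": 0.10 * capacity,    # percentages scale with capacity
        "nodefs.inodesFree": 0.05 * capacity,
        "imagefs.available": 0.15 * capacity,
        "imagefs.inodesFree": 0.05 * capacity,
    }
    return available < thresholds[signal]

vda4_capacity = 85292941312  # /var filesystem capacity from the fs.go lines above
print(breaches_hard_eviction("nodefs.available", available=6 * 1024**3, capacity=vda4_capacity))
# True: ~6.4e9 bytes free is under the ~8.5e9-byte (10%) floor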
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.054724 4711 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.055733 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.055763 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.055771 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.055780 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.055793 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.055809 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.055822 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.055838 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.055853 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.055863 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.055879 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.055887 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.056096 4711 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.056635 4711 server.go:1280] "Started kubelet" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.058268 4711 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.058843 4711 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.059148 4711 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.059729 4711 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Jan 23 08:20:15 crc kubenswrapper[4711]: E0123 08:20:15.060065 4711 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.220:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188d4e62d78d71f5 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-23 08:20:15.056605685 +0000 UTC 
m=+0.629562053,LastTimestamp:2026-01-23 08:20:15.056605685 +0000 UTC m=+0.629562053,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.062835 4711 server.go:460] "Adding debug handlers to kubelet server" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.063868 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.063957 4711 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.065248 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 08:57:33.474428335 +0000 UTC Jan 23 08:20:15 crc systemd[1]: Started Kubernetes Kubelet. Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.184009 4711 volume_manager.go:287] "The desired_state_of_world populator starts" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.184040 4711 volume_manager.go:289] "Starting Kubelet Volume Manager" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.184248 4711 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Jan 23 08:20:15 crc kubenswrapper[4711]: E0123 08:20:15.185072 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 23 08:20:15 crc kubenswrapper[4711]: E0123 08:20:15.285209 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 23 08:20:15 crc kubenswrapper[4711]: W0123 08:20:15.189407 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Jan 23 08:20:15 crc kubenswrapper[4711]: E0123 08:20:15.314158 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Jan 23 08:20:15 crc kubenswrapper[4711]: E0123 08:20:15.317962 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="200ms" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326178 4711 factory.go:55] Registering systemd factory Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326211 4711 factory.go:221] Registration of the systemd container factory successfully Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326536 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326600 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326613 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326624 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326634 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326644 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326656 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326666 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326688 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326701 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326711 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326722 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326733 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326744 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326753 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326764 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326774 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326803 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326812 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326821 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326834 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326846 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326857 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326886 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" 
volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326918 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326929 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326941 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326958 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326970 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326982 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.326992 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327003 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327027 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327038 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327048 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327059 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327107 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327119 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327141 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327151 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327163 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327173 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327185 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327196 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327205 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327216 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327227 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327238 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327240 4711 factory.go:153] Registering CRI-O factory Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327275 4711 factory.go:221] Registration of the crio container factory successfully Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327249 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327382 4711 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327419 4711 factory.go:103] Registering Raw factory Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327437 4711 manager.go:1196] Started watching for new ooms in manager Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327373 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327483 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327496 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327546 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.327559 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.328269 4711 manager.go:319] Starting recovery of all containers Jan 23 
08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330144 4711 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330187 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330203 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330214 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330226 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330236 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330246 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330257 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330267 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330277 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330288 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330299 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330312 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330321 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330332 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330342 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330353 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330366 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330377 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330390 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330402 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330414 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330426 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330439 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330456 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330482 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330497 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330524 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330539 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330552 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330565 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330577 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330589 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" 
volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330600 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330614 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330627 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330637 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330648 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330658 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330673 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330684 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330696 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330709 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330727 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330740 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330751 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330762 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330775 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330786 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330797 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330810 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330827 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330840 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330852 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330864 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" 
volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330876 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330886 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330897 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330910 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330922 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330937 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330949 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330960 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330973 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330984 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.330995 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" 
volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331006 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331016 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331027 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331039 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331051 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331061 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331105 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331118 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331129 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331141 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331153 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" 
seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331163 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331173 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331190 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331201 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331214 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331224 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331237 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331249 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331260 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331271 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331282 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331292 4711 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331302 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331312 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331323 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331337 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331353 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331363 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331375 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331389 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331399 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331412 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331424 4711 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331437 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331460 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331476 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.331490 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333202 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333247 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333262 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333275 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333309 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333321 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333334 4711 reconstruct.go:130] "Volume is marked as uncertain and added 
into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333347 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333360 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333371 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333410 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333438 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333532 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333554 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333566 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333580 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333625 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333665 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333678 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333690 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333716 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333728 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333739 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333750 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333761 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333799 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333815 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333837 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333901 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333920 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333938 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333955 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333971 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.333987 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.334037 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.334054 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.334132 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.334144 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.334194 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.334214 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.334227 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.334244 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.334265 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.334278 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.334328 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.334340 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.334354 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.334370 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.334383 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.334395 4711 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.334408 4711 reconstruct.go:97] "Volume reconstruction finished" Jan 23 08:20:15 crc 
kubenswrapper[4711]: I0123 08:20:15.334418 4711 reconciler.go:26] "Reconciler: start to sync state"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.347798 4711 manager.go:324] Recovery completed
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.364261 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:15 crc kubenswrapper[4711]: E0123 08:20:15.385954 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.386430 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.386473 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.386485 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.388042 4711 cpu_manager.go:225] "Starting CPU manager" policy="none"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.388086 4711 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.388118 4711 state_mem.go:36] "Initialized new in-memory state store"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.440012 4711 policy_none.go:49] "None policy: Start"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.441273 4711 memory_manager.go:170] "Starting memorymanager" policy="None"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.441333 4711 state_mem.go:35] "Initializing new in-memory state store"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.470519 4711 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.472339 4711 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.472400 4711 status_manager.go:217] "Starting to sync pod status with apiserver"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.472431 4711 kubelet.go:2335] "Starting kubelet main sync loop"
Jan 23 08:20:15 crc kubenswrapper[4711]: E0123 08:20:15.472480 4711 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]"
Jan 23 08:20:15 crc kubenswrapper[4711]: W0123 08:20:15.473695 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused
Jan 23 08:20:15 crc kubenswrapper[4711]: E0123 08:20:15.473803 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError"
Jan 23 08:20:15 crc kubenswrapper[4711]: E0123 08:20:15.486410 4711 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.515250 4711 manager.go:334] "Starting Device Plugin manager"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.515329 4711 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.515344 4711 server.go:79] "Starting device plugin registration server"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.515946 4711 eviction_manager.go:189] "Eviction manager: starting control loop"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.515984 4711 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.516422 4711 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.516546 4711 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.516565 4711 plugin_manager.go:118] "Starting Kubelet Plugin Manager"
Jan 23 08:20:15 crc kubenswrapper[4711]: E0123 08:20:15.519695 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="400ms"
Jan 23 08:20:15 crc kubenswrapper[4711]: E0123 08:20:15.526232 4711 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.572619 4711 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc"]
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.572740 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.574106 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.574137 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.574150 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.574285 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.574732 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.574780 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.575336 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.575385 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.575404 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.575407 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.575485 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.575498 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.575714 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.575781 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.575810 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.576869 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.576896 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.576915 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.576920 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.576938 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.576924 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.577120 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.577362 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.577418 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.578235 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.578284 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.578300 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.578564 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.578636 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.578666 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.579324 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.579342 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.579353 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.579573 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.579593 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.579604 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.580211 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.580255 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.580271 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.580617 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.580666 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.581650 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.581690 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.581705 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.618776 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.621126 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.621185 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.621199 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.621248 4711 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: E0123 08:20:15.621987 4711 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.220:6443: connect: connection refused" node="crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.638344 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.638420 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.638467 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.638528 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.638647 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.638743 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.638847 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.638911 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.638975 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.639029 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.639062 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.639099 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.639172 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.639225 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.639284 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741326 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741444 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741480 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741542 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741571 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741596 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741620 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741656 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741618 4711 operation_generator.go:637] "MountVolume.SetUp
succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741751 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741680 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741697 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741777 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741847 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741860 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741883 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741885 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741849 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741904 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741916 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741926 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741759 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741817 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741942 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741949 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.741998 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.742019 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.742008 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.742676 
4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.807710 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.822253 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.824383 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.824445 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.824458 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.824494 4711 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 23 08:20:15 crc kubenswrapper[4711]: E0123 08:20:15.825222 4711 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.220:6443: connect: connection refused" node="crc" Jan 23 08:20:15 crc kubenswrapper[4711]: W0123 08:20:15.842197 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Jan 23 08:20:15 crc kubenswrapper[4711]: E0123 08:20:15.842353 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.913015 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: E0123 08:20:15.920726 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="800ms" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.942076 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: W0123 08:20:15.942875 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-8f8585583f10c798fe6696371062ab994d06b98664f2f57d0a46b1ecaf25d852 WatchSource:0}: Error finding container 8f8585583f10c798fe6696371062ab994d06b98664f2f57d0a46b1ecaf25d852: Status 404 returned error can't find the container with id 8f8585583f10c798fe6696371062ab994d06b98664f2f57d0a46b1ecaf25d852 Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.962488 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: W0123 08:20:15.962890 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-0a85d0346936ca54eb391dc9df837298a1062172784712ce045ea7fc0e9cdb02 WatchSource:0}: Error finding container 0a85d0346936ca54eb391dc9df837298a1062172784712ce045ea7fc0e9cdb02: Status 404 returned error can't find the container with id 0a85d0346936ca54eb391dc9df837298a1062172784712ce045ea7fc0e9cdb02 Jan 23 08:20:15 crc kubenswrapper[4711]: W0123 08:20:15.977340 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-e74110ee116be0bffa33c76b89874f8ef81202714e95614e0e9b73ba9d5c8761 WatchSource:0}: Error finding container e74110ee116be0bffa33c76b89874f8ef81202714e95614e0e9b73ba9d5c8761: Status 404 returned error can't find the container with id e74110ee116be0bffa33c76b89874f8ef81202714e95614e0e9b73ba9d5c8761 Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.983651 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 23 08:20:15 crc kubenswrapper[4711]: I0123 08:20:15.991588 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 23 08:20:16 crc kubenswrapper[4711]: W0123 08:20:16.001724 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-77da46c9582e39be19c83d82c18ead803ee248c3867820271a8b779aea2c4d8f WatchSource:0}: Error finding container 77da46c9582e39be19c83d82c18ead803ee248c3867820271a8b779aea2c4d8f: Status 404 returned error can't find the container with id 77da46c9582e39be19c83d82c18ead803ee248c3867820271a8b779aea2c4d8f Jan 23 08:20:16 crc kubenswrapper[4711]: W0123 08:20:16.007523 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-7ae95913e9eda8db7ea635c6eaf244fb4bc5fbdf576075b59cb67f09f956a3c5 WatchSource:0}: Error finding container 7ae95913e9eda8db7ea635c6eaf244fb4bc5fbdf576075b59cb67f09f956a3c5: Status 404 returned error can't find the container with id 7ae95913e9eda8db7ea635c6eaf244fb4bc5fbdf576075b59cb67f09f956a3c5 Jan 23 08:20:16 crc kubenswrapper[4711]: I0123 08:20:16.059776 4711 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Jan 23 08:20:16 crc kubenswrapper[4711]: I0123 08:20:16.065927 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 10:50:32.780939047 +0000 UTC Jan 23 08:20:16 crc kubenswrapper[4711]: I0123 08:20:16.225358 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 23 08:20:16 crc kubenswrapper[4711]: I0123 08:20:16.227011 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:16 crc kubenswrapper[4711]: I0123 08:20:16.227061 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:16 crc kubenswrapper[4711]: I0123 08:20:16.227083 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:16 crc kubenswrapper[4711]: I0123 08:20:16.227128 4711 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 23 08:20:16 crc kubenswrapper[4711]: E0123 08:20:16.227670 4711 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.220:6443: connect: connection refused" node="crc" Jan 23 08:20:16 crc kubenswrapper[4711]: W0123 08:20:16.426087 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Jan 23 08:20:16 crc kubenswrapper[4711]: E0123 08:20:16.426451 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Jan 23 08:20:16 crc kubenswrapper[4711]: W0123 08:20:16.429564 4711 
reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Jan 23 08:20:16 crc kubenswrapper[4711]: E0123 08:20:16.429640 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Jan 23 08:20:16 crc kubenswrapper[4711]: W0123 08:20:16.429617 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Jan 23 08:20:16 crc kubenswrapper[4711]: E0123 08:20:16.429905 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Jan 23 08:20:16 crc kubenswrapper[4711]: I0123 08:20:16.485366 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d"} Jan 23 08:20:16 crc kubenswrapper[4711]: I0123 08:20:16.485518 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"0a85d0346936ca54eb391dc9df837298a1062172784712ce045ea7fc0e9cdb02"} Jan 23 08:20:16 crc kubenswrapper[4711]: I0123 08:20:16.487855 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8f8585583f10c798fe6696371062ab994d06b98664f2f57d0a46b1ecaf25d852"} Jan 23 08:20:16 crc kubenswrapper[4711]: I0123 08:20:16.488940 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7ae95913e9eda8db7ea635c6eaf244fb4bc5fbdf576075b59cb67f09f956a3c5"} Jan 23 08:20:16 crc kubenswrapper[4711]: I0123 08:20:16.490052 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"77da46c9582e39be19c83d82c18ead803ee248c3867820271a8b779aea2c4d8f"} Jan 23 08:20:16 crc kubenswrapper[4711]: I0123 08:20:16.491282 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac"} Jan 23 08:20:16 crc kubenswrapper[4711]: I0123 08:20:16.491310 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
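[Editor's note] The PLEG and node-status records above all share the same journald-plus-klog framing. The following is a minimal, hypothetical Go helper (not part of kubelet or of this CI job) showing how the severity, source location, and structured message could be pulled out of one such record; the record in the example is copied verbatim from this log.

// logparse.go - hypothetical helper, not part of kubelet or this CI job.
// Splits one journald-framed klog record from the log above into its fields.
package main

import (
	"fmt"
	"regexp"
)

// Framing: "<mon> <day> <time> <host> <unit>[<pid>]: <sev><mmdd> <time> <pid> <file>:<line>] <msg>"
var recordRe = regexp.MustCompile(
	`^(\w{3} +\d+ [\d:]+) (\S+) (\S+)\[(\d+)\]: ([IWEF])(\d{4}) ([\d:.]+) +(\d+) ([\w./-]+:\d+)\] (.*)$`)

func main() {
	// Record copied verbatim from the log above.
	line := `Jan 23 08:20:16 crc kubenswrapper[4711]: I0123 08:20:16.227128 4711 kubelet_node_status.go:76] "Attempting to register node" node="crc"`
	m := recordRe.FindStringSubmatch(line)
	if m == nil {
		fmt.Println("not a klog record")
		return
	}
	// Prints: severity=I source=kubelet_node_status.go:76 msg="Attempting to register node" node="crc"
	fmt.Printf("severity=%s source=%s msg=%s\n", m[5], m[9], m[10])
}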
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"e74110ee116be0bffa33c76b89874f8ef81202714e95614e0e9b73ba9d5c8761"} Jan 23 08:20:16 crc kubenswrapper[4711]: I0123 08:20:16.491451 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 23 08:20:16 crc kubenswrapper[4711]: I0123 08:20:16.492653 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:16 crc kubenswrapper[4711]: I0123 08:20:16.492685 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:16 crc kubenswrapper[4711]: I0123 08:20:16.492696 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:16 crc kubenswrapper[4711]: E0123 08:20:16.722275 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="1.6s" Jan 23 08:20:16 crc kubenswrapper[4711]: I0123 08:20:16.918186 4711 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 23 08:20:16 crc kubenswrapper[4711]: E0123 08:20:16.919354 4711 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.027949 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.030895 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.030949 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.030974 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.031009 4711 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 23 08:20:17 crc kubenswrapper[4711]: E0123 08:20:17.031601 4711 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.220:6443: connect: connection refused" node="crc" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.059742 4711 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.066120 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 14:31:22.234089575 +0000 UTC Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.594425 4711 generic.go:334] "Generic (PLEG): container finished" 
podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac" exitCode=0 Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.594561 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac"} Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.594732 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.595832 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.595873 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.595892 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.601159 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444"} Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.601225 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc"} Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.601250 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429"} Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.601393 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.602709 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.602747 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.602764 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.626582 4711 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99" exitCode=0 Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.626718 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.626734 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99"} Jan 23 
08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.628563 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.628612 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.628632 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.630936 4711 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575" exitCode=0 Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.631017 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575"} Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.631122 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.631142 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.632688 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.632737 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.632753 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.633134 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.633177 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"3bc864465d2dc98d051395f369a6ca2628f21b5f91ec4fa7f8432e1b28aa5e93"} Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.633153 4711 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="3bc864465d2dc98d051395f369a6ca2628f21b5f91ec4fa7f8432e1b28aa5e93" exitCode=0 Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.633212 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.633237 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.633254 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.634172 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.634217 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 
08:20:17.634232 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:17 crc kubenswrapper[4711]: I0123 08:20:17.646019 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 23 08:20:17 crc kubenswrapper[4711]: W0123 08:20:17.836820 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Jan 23 08:20:17 crc kubenswrapper[4711]: E0123 08:20:17.836935 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.173197 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 12:28:51.165682219 +0000 UTC Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.173895 4711 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Jan 23 08:20:18 crc kubenswrapper[4711]: E0123 08:20:18.324050 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="3.2s" Jan 23 08:20:18 crc kubenswrapper[4711]: W0123 08:20:18.328620 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused Jan 23 08:20:18 crc kubenswrapper[4711]: E0123 08:20:18.328701 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError" Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.632537 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.636350 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.636394 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.636600 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.636821 4711 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 23 08:20:18 crc kubenswrapper[4711]: E0123 
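[Editor's note] The three "Failed to ensure lease exists, will retry" records above carry retry hints of 800ms (08:20:15.920726), 1.6s (08:20:16.722275), and 3.2s (08:20:18.324050): the interval doubles for each consecutive failure while the API server stays unreachable. The Go sketch below reproduces only that doubling arithmetic; it is illustrative, not the kubelet's actual lease-controller code, and the 7s cap is an assumption made for the sketch.

// backoff.go - illustrative only; reproduces the doubling retry hints seen
// above (800ms -> 1.6s -> 3.2s), not the kubelet's actual implementation.
package main

import (
	"fmt"
	"time"
)

func main() {
	interval := 800 * time.Millisecond     // first "will retry" hint in the log
	maxInterval := 7 * time.Second         // assumed upper bound for the sketch
	for attempt := 1; attempt <= 4; attempt++ {
		fmt.Printf("attempt %d: retry in %v\n", attempt, interval)
		interval *= 2 // double on each consecutive failure
		if interval > maxInterval {
			interval = maxInterval
		}
	}
}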
Jan 23 08:20:18 crc kubenswrapper[4711]: E0123 08:20:18.638081 4711 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.220:6443: connect: connection refused" node="crc"
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.644393 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"05ca21b49a07b82a6e6767f1839594c7ead4801d43541cce740a12b11dca6f3e"}
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.644645 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.645607 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.645715 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.645731 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.654185 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"0d00094f52482a5875f5386a0f3d403753adc0df59abb0b433ca8b449779a594"}
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.654259 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"b0701a83849ad2ad78eaf4f7b3be208d5ddd62c30472085fb71fc1229ba9dd51"}
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.654277 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"82a1773b9c61c3f43909d5cb8f620a2895f394d7f9edac8e064a4a54c75d0ca2"}
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.654405 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.655665 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.655715 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.655728 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.658705 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801"}
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.658767 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5"}
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.658790 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca"}
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.661599 4711 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a" exitCode=0
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.661727 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.662234 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.662532 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a"}
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.663058 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.663087 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.663098 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.664141 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.664185 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:18 crc kubenswrapper[4711]: I0123 08:20:18.664202 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:18 crc kubenswrapper[4711]: W0123 08:20:18.966651 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused
Jan 23 08:20:18 crc kubenswrapper[4711]: E0123 08:20:18.966772 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.220:6443: connect: connection refused" logger="UnhandledError"
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.058915 4711 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.220:6443: connect: connection refused
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.173556 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 07:13:59.258814533 +0000 UTC
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.667127 4711 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd" exitCode=0
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.667233 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd"}
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.667261 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.668161 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.668191 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.668202 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.672140 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f"}
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.672187 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.672195 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de"}
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.672214 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.672230 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.672256 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.672379 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.674383 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.674419 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.674431 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.674606 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.674653 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.674677 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.675251 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.675271 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.675282 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.675429 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.675448 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:19 crc kubenswrapper[4711]: I0123 08:20:19.675458 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:20 crc kubenswrapper[4711]: I0123 08:20:20.174699 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 12:48:23.867726697 +0000 UTC
Jan 23 08:20:20 crc kubenswrapper[4711]: I0123 08:20:20.680127 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a"}
Jan 23 08:20:20 crc kubenswrapper[4711]: I0123 08:20:20.680195 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f"}
Jan 23 08:20:20 crc kubenswrapper[4711]: I0123 08:20:20.680212 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5"}
Jan 23 08:20:20 crc kubenswrapper[4711]: I0123 08:20:20.680226 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0"}
Jan 23 08:20:20 crc kubenswrapper[4711]: I0123 08:20:20.680255 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Jan 23 08:20:20 crc kubenswrapper[4711]: I0123 08:20:20.680297 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:20 crc kubenswrapper[4711]: I0123 08:20:20.682061 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:20 crc kubenswrapper[4711]: I0123 08:20:20.682179 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:20 crc kubenswrapper[4711]: I0123 08:20:20.682198 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:20 crc kubenswrapper[4711]: I0123 08:20:20.783618 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 23 08:20:21 crc kubenswrapper[4711]: I0123 08:20:21.175919 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 20:56:15.121149063 +0000 UTC
Jan 23 08:20:21 crc kubenswrapper[4711]: I0123 08:20:21.316970 4711 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Jan 23 08:20:21 crc kubenswrapper[4711]: I0123 08:20:21.460235 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 23 08:20:21 crc kubenswrapper[4711]: I0123 08:20:21.687652 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"16388bb30a8ac13e945c677008cd5fd5a931963219296a5ee395fca61cfc0f2f"}
Jan 23 08:20:21 crc kubenswrapper[4711]: I0123 08:20:21.687727 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Jan 23 08:20:21 crc kubenswrapper[4711]: I0123 08:20:21.687747 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:21 crc kubenswrapper[4711]: I0123 08:20:21.687801 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:21 crc kubenswrapper[4711]: I0123 08:20:21.688910 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:21 crc kubenswrapper[4711]: I0123 08:20:21.688965 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:21 crc kubenswrapper[4711]: I0123 08:20:21.688985 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:21 crc kubenswrapper[4711]: I0123 08:20:21.689327 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:21 crc kubenswrapper[4711]: I0123 08:20:21.689403 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:21 crc kubenswrapper[4711]: I0123 08:20:21.689430 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:21 crc kubenswrapper[4711]: I0123 08:20:21.838201 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:21 crc kubenswrapper[4711]: I0123 08:20:21.839862 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:21 crc kubenswrapper[4711]: I0123 08:20:21.839900 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:21 crc kubenswrapper[4711]: I0123 08:20:21.839909 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:21 crc kubenswrapper[4711]: I0123 08:20:21.839933 4711 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 23 08:20:22 crc kubenswrapper[4711]: I0123 08:20:22.176701 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 15:55:25.268549702 +0000 UTC
Jan 23 08:20:22 crc kubenswrapper[4711]: I0123 08:20:22.690559 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Jan 23 08:20:22 crc kubenswrapper[4711]: I0123 08:20:22.690628 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:22 crc kubenswrapper[4711]: I0123 08:20:22.690758 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:22 crc kubenswrapper[4711]: I0123 08:20:22.692025 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:22 crc kubenswrapper[4711]: I0123 08:20:22.692092 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:22 crc kubenswrapper[4711]: I0123 08:20:22.692123 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:22 crc kubenswrapper[4711]: I0123 08:20:22.692144 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:22 crc kubenswrapper[4711]: I0123 08:20:22.692178 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:22 crc kubenswrapper[4711]: I0123 08:20:22.692196 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:23 crc kubenswrapper[4711]: I0123 08:20:23.176896 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 02:09:02.692269777 +0000 UTC
Jan 23 08:20:24 crc kubenswrapper[4711]: I0123 08:20:24.177635 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 00:09:27.940583498 +0000 UTC
Jan 23 08:20:24 crc kubenswrapper[4711]: I0123 08:20:24.294231 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Jan 23 08:20:24 crc kubenswrapper[4711]: I0123 08:20:24.294489 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:24 crc kubenswrapper[4711]: I0123 08:20:24.296347 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:24 crc kubenswrapper[4711]: I0123 08:20:24.296409 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:24 crc kubenswrapper[4711]: I0123 08:20:24.296433 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:25 crc kubenswrapper[4711]: I0123 08:20:25.178716 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 11:09:35.485711531 +0000 UTC
Jan 23 08:20:25 crc kubenswrapper[4711]: I0123 08:20:25.441084 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 23 08:20:25 crc kubenswrapper[4711]: I0123 08:20:25.441329 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:25 crc kubenswrapper[4711]: I0123 08:20:25.443063 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:25 crc kubenswrapper[4711]: I0123 08:20:25.443115 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:25 crc kubenswrapper[4711]: I0123 08:20:25.443127 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:25 crc kubenswrapper[4711]: I0123 08:20:25.473931 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Jan 23 08:20:25 crc kubenswrapper[4711]: I0123 08:20:25.474225 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:25 crc kubenswrapper[4711]: I0123 08:20:25.475888 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:25 crc kubenswrapper[4711]: I0123 08:20:25.475949 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:25 crc kubenswrapper[4711]: I0123 08:20:25.475974 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:25 crc kubenswrapper[4711]: E0123 08:20:25.526485 4711 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Jan 23 08:20:25 crc kubenswrapper[4711]: I0123 08:20:25.725927 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 23 08:20:25 crc kubenswrapper[4711]: I0123 08:20:25.726154 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:25 crc kubenswrapper[4711]: I0123 08:20:25.727570 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:25 crc kubenswrapper[4711]: I0123 08:20:25.727603 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:25 crc kubenswrapper[4711]: I0123 08:20:25.727613 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:25 crc kubenswrapper[4711]: I0123 08:20:25.733071 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 23 08:20:25 crc kubenswrapper[4711]: I0123 08:20:25.738233 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 23 08:20:26 crc kubenswrapper[4711]: I0123 08:20:26.179676 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 23:41:04.628250911 +0000 UTC
Jan 23 08:20:26 crc kubenswrapper[4711]: I0123 08:20:26.702872 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 23 08:20:26 crc kubenswrapper[4711]: I0123 08:20:26.703354 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 23 08:20:26 crc kubenswrapper[4711]: I0123 08:20:26.704667 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:26 crc kubenswrapper[4711]: I0123 08:20:26.704738 4711 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:26 crc kubenswrapper[4711]: I0123 08:20:26.704779 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:26 crc kubenswrapper[4711]: I0123 08:20:26.716884 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 23 08:20:27 crc kubenswrapper[4711]: I0123 08:20:27.180874 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 10:49:20.334981853 +0000 UTC Jan 23 08:20:27 crc kubenswrapper[4711]: I0123 08:20:27.705818 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 23 08:20:27 crc kubenswrapper[4711]: I0123 08:20:27.707123 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:27 crc kubenswrapper[4711]: I0123 08:20:27.707187 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:27 crc kubenswrapper[4711]: I0123 08:20:27.707213 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:28 crc kubenswrapper[4711]: I0123 08:20:28.181239 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 01:25:04.139892205 +0000 UTC Jan 23 08:20:28 crc kubenswrapper[4711]: I0123 08:20:28.709684 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 23 08:20:28 crc kubenswrapper[4711]: I0123 08:20:28.710882 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:28 crc kubenswrapper[4711]: I0123 08:20:28.710929 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:28 crc kubenswrapper[4711]: I0123 08:20:28.710942 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:28 crc kubenswrapper[4711]: I0123 08:20:28.739257 4711 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 23 08:20:28 crc kubenswrapper[4711]: I0123 08:20:28.739366 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:20:29 crc kubenswrapper[4711]: I0123 08:20:29.182297 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 09:01:32.329402123 +0000 UTC Jan 23 08:20:29 crc kubenswrapper[4711]: W0123 08:20:29.416575 4711 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get 
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Jan 23 08:20:29 crc kubenswrapper[4711]: I0123 08:20:29.416754 4711 trace.go:236] Trace[1063874858]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (23-Jan-2026 08:20:19.414) (total time: 10001ms): Jan 23 08:20:29 crc kubenswrapper[4711]: Trace[1063874858]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (08:20:29.416) Jan 23 08:20:29 crc kubenswrapper[4711]: Trace[1063874858]: [10.001736932s] [10.001736932s] END Jan 23 08:20:29 crc kubenswrapper[4711]: E0123 08:20:29.416787 4711 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Jan 23 08:20:29 crc kubenswrapper[4711]: I0123 08:20:29.775753 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Jan 23 08:20:29 crc kubenswrapper[4711]: I0123 08:20:29.776058 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 23 08:20:29 crc kubenswrapper[4711]: I0123 08:20:29.777923 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:29 crc kubenswrapper[4711]: I0123 08:20:29.777973 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:29 crc kubenswrapper[4711]: I0123 08:20:29.777988 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:30 crc kubenswrapper[4711]: I0123 08:20:30.061523 4711 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Jan 23 08:20:30 crc kubenswrapper[4711]: I0123 08:20:30.183279 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 17:33:28.641101677 +0000 UTC Jan 23 08:20:30 crc kubenswrapper[4711]: I0123 08:20:30.449810 4711 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Jan 23 08:20:30 crc kubenswrapper[4711]: I0123 08:20:30.450188 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jan 23 08:20:30 crc kubenswrapper[4711]: I0123 08:20:30.464107 4711 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" 
start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Jan 23 08:20:30 crc kubenswrapper[4711]: I0123 08:20:30.464228 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jan 23 08:20:31 crc kubenswrapper[4711]: I0123 08:20:31.184128 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 04:46:41.998009023 +0000 UTC Jan 23 08:20:31 crc kubenswrapper[4711]: I0123 08:20:31.475482 4711 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]log ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]etcd ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/start-apiserver-admission-initializer ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/openshift.io-api-request-count-filter ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/openshift.io-startkubeinformers ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/generic-apiserver-start-informers ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/priority-and-fairness-config-consumer ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/priority-and-fairness-filter ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/storage-object-count-tracker-hook ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/start-apiextensions-informers ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/start-apiextensions-controllers ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/crd-informer-synced ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/start-system-namespaces-controller ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/start-cluster-authentication-info-controller ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/start-legacy-token-tracking-controller ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/start-service-ip-repair-controllers ok Jan 23 08:20:31 crc kubenswrapper[4711]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/scheduling/bootstrap-system-priority-classes ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/priority-and-fairness-config-producer ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/bootstrap-controller ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/aggregator-reload-proxy-client-cert ok Jan 23 08:20:31 crc kubenswrapper[4711]: 
[+]poststarthook/start-kube-aggregator-informers ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/apiservice-status-local-available-controller ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/apiservice-status-remote-available-controller ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/apiservice-registration-controller ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/apiservice-wait-for-first-sync ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/apiservice-discovery-controller ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/kube-apiserver-autoregistration ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]autoregister-completion ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/apiservice-openapi-controller ok Jan 23 08:20:31 crc kubenswrapper[4711]: [+]poststarthook/apiservice-openapiv3-controller ok Jan 23 08:20:31 crc kubenswrapper[4711]: livez check failed Jan 23 08:20:31 crc kubenswrapper[4711]: I0123 08:20:31.475629 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 23 08:20:32 crc kubenswrapper[4711]: I0123 08:20:32.185324 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 08:14:25.896446461 +0000 UTC Jan 23 08:20:33 crc kubenswrapper[4711]: I0123 08:20:33.186478 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 09:19:28.569573587 +0000 UTC Jan 23 08:20:34 crc kubenswrapper[4711]: I0123 08:20:34.187579 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 20:16:00.386426238 +0000 UTC Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.188465 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 17:17:33.103843968 +0000 UTC Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.436364 4711 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 23 08:20:35 crc kubenswrapper[4711]: E0123 08:20:35.447548 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.449906 4711 trace.go:236] Trace[1730862268]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (23-Jan-2026 08:20:23.087) (total time: 12362ms): Jan 23 08:20:35 crc kubenswrapper[4711]: Trace[1730862268]: ---"Objects listed" error: 12362ms (08:20:35.449) Jan 23 08:20:35 crc kubenswrapper[4711]: Trace[1730862268]: [12.362055773s] [12.362055773s] END Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.450115 4711 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.451087 4711 trace.go:236] Trace[1950314252]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (23-Jan-2026 08:20:21.446) (total time: 14005ms): Jan 23 08:20:35 crc 
kubenswrapper[4711]: Trace[1950314252]: ---"Objects listed" error: 14004ms (08:20:35.450) Jan 23 08:20:35 crc kubenswrapper[4711]: Trace[1950314252]: [14.00500141s] [14.00500141s] END Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.451129 4711 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 23 08:20:35 crc kubenswrapper[4711]: E0123 08:20:35.452645 4711 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.453327 4711 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.460897 4711 trace.go:236] Trace[1621447801]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (23-Jan-2026 08:20:22.944) (total time: 12516ms): Jan 23 08:20:35 crc kubenswrapper[4711]: Trace[1621447801]: ---"Objects listed" error: 12516ms (08:20:35.460) Jan 23 08:20:35 crc kubenswrapper[4711]: Trace[1621447801]: [12.516650136s] [12.516650136s] END Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.460936 4711 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.466924 4711 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.610666 4711 csr.go:261] certificate signing request csr-l2rp9 is approved, waiting to be issued Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.632421 4711 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:37256->192.168.126.11:17697: read: connection reset by peer" start-of-body= Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.632499 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:37256->192.168.126.11:17697: read: connection reset by peer" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.632531 4711 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:37266->192.168.126.11:17697: read: connection reset by peer" start-of-body= Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.632557 4711 csr.go:257] certificate signing request csr-l2rp9 is issued Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.632609 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:37266->192.168.126.11:17697: read: connection reset by peer" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.733143 4711 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.735315 4711 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f" exitCode=255 Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.735383 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f"} Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.742215 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.747061 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.785403 4711 scope.go:117] "RemoveContainer" containerID="cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.922202 4711 apiserver.go:52] "Watching apiserver" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.927605 4711 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.927888 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h"] Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.928536 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.928588 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.928593 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.928623 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.928634 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.928684 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 23 08:20:35 crc kubenswrapper[4711]: E0123 08:20:35.929078 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:20:35 crc kubenswrapper[4711]: E0123 08:20:35.929091 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:20:35 crc kubenswrapper[4711]: E0123 08:20:35.929288 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.930879 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.931456 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.931687 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.932120 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.932424 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.933453 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.933588 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.933644 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.938883 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.958900 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.980186 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.985164 4711 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Jan 23 08:20:35 crc kubenswrapper[4711]: I0123 08:20:35.994251 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.006097 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.025916 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.038062 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.056413 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.056492 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.056543 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.056568 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.056587 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.056618 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.056639 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.056655 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.056690 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.056706 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.056720 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.056736 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.056770 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.056789 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.056809 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.056843 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.056862 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.056880 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.056896 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.056926 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057111 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057157 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057179 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057198 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057216 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057250 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057269 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: 
\"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057287 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057320 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057337 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057356 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057329 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057389 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057411 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057429 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057446 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057482 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057542 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057559 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057574 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057606 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057626 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057645 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057662 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057694 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057714 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057730 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057763 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057780 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057802 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057822 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057855 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057872 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057891 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057909 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.057925 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058027 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058097 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058134 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058171 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058199 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 
08:20:36.058238 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058267 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058295 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058301 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058321 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058374 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058393 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058424 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058442 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058468 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod 
\"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058493 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058539 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058559 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058611 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058630 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058652 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058671 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058691 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058708 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058727 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058728 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058792 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058811 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058835 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058852 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058871 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058897 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058921 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058973 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.058993 4711 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059010 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059033 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059057 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059073 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059089 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059105 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059192 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059303 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059346 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059328 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059364 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059441 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059454 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059488 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059529 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059549 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059568 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059588 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059608 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059630 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059647 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059663 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059680 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059686 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059696 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059745 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059774 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059811 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059842 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059858 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059875 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059911 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059941 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.059974 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060002 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060022 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060033 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060061 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060094 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060122 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060147 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060178 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060206 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060233 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060259 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060283 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060309 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060335 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060367 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060398 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060424 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060440 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060450 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060477 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060523 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060526 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060557 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060585 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060612 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060642 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060679 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060704 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.072698 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\
\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060612 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.060737 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:20:36.560708427 +0000 UTC m=+22.133665015 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.079941 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.079976 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.079997 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080019 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080043 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080062 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080082 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080111 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080134 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" 
(UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080156 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080187 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080205 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080224 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080244 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080261 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080281 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080300 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080308 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080320 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080481 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080543 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080566 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080576 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080587 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080612 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080631 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080661 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080679 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.080728 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.081262 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.081752 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.081783 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.082037 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.082271 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.082391 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.082434 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.082522 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.082725 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.082795 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.082838 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.082881 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.060804 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.061047 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.061150 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.061725 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.062004 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.062257 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.062341 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.062551 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.062834 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.062949 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.063015 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.063205 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.063350 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.063484 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.063659 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.063900 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.063920 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.063961 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.063968 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.064047 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.064427 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.064668 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.064751 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.064838 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.064855 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.065065 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.065213 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.065222 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.065343 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.065354 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.085861 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.065528 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.065575 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.066039 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.066302 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.066932 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.067223 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.067430 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.067726 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.067990 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.068210 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.068979 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.069620 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.069920 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.070304 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.070444 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.070610 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.070939 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.071182 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.071314 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.071406 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.071563 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.071705 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.071772 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.071854 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.072027 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.072125 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.072333 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.072466 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.073239 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.073483 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.073673 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.073871 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.074048 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.074339 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). 
InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.074532 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.075145 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.079394 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.079625 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.079847 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.084885 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.085111 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.085330 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.085550 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.086267 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.086560 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.086745 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.087251 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.087307 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.087417 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.087844 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.088310 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.088689 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.088735 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.089007 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.089255 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.089597 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.089642 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.089994 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). 
InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.090247 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.090466 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.091379 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.091473 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.091637 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.091647 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.091717 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.091748 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.091767 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.091784 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.091804 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.091821 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.091849 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.091866 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.091888 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod 
\"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.091906 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.091926 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.091942 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.091960 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.091977 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.091998 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092016 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092032 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092050 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092069 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod 
\"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092089 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092106 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092124 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092144 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092160 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092180 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092199 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092251 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092281 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092303 4711 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092327 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092349 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092370 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092390 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092409 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092430 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092451 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092471 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod 
\"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092492 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092552 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092578 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092681 4711 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092693 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092705 4711 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092714 4711 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092724 4711 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092733 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092744 4711 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092757 4711 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092768 4711 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092779 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092789 4711 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092799 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092809 4711 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092819 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092828 4711 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092839 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092849 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092859 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092869 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092885 4711 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092896 4711 reconciler_common.go:293] "Volume detached for volume 
\"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092907 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092916 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092926 4711 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092935 4711 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092944 4711 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092954 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092965 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092974 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092984 4711 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.092993 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093002 4711 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093013 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093027 4711 
reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093037 4711 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093047 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093058 4711 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093068 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093077 4711 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093086 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093094 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093105 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093114 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093124 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093132 4711 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093141 4711 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093149 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" 
(UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093158 4711 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093167 4711 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093176 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093185 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093196 4711 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093206 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093216 4711 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093225 4711 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093237 4711 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093247 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093256 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093266 4711 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093275 4711 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node 
\"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093284 4711 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093293 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093302 4711 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093311 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093320 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093329 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093337 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093346 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093354 4711 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093364 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093372 4711 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093382 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093390 4711 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath 
\"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093399 4711 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093408 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093417 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.093426 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.095225 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.095836 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096093 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.089577 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096612 4711 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096635 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096659 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096671 4711 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096683 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096694 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096705 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096716 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096789 4711 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096829 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096856 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096871 4711 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096882 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" 
DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096902 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096917 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096930 4711 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096941 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096951 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096975 4711 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096987 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.096999 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.097010 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.097021 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.097033 4711 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.097043 4711 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.097054 4711 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" 
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.097066 4711 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.097113 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.097126 4711 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.097137 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.097196 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.097207 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.099284 4711 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.099318 4711 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.099333 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.099346 4711 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.099360 4711 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.099374 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.099389 4711 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.099402 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.099415 4711 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.099429 4711 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.099442 4711 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.099453 4711 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.097298 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.097337 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.097485 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.098270 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.098336 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.098341 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.098701 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.098845 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.099056 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.099089 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.099139 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.099248 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.099383 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.099665 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.099748 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.099843 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.100008 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.100241 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.100263 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.100312 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.100561 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.103542 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.103634 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.103940 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.104268 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.104582 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.104912 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.105367 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.105780 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.106135 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.107037 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.107236 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.107659 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection".
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.107955 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.108539 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.108712 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.109005 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.109135 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.109381 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.109621 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.109691 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.109873 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.110066 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.110247 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.110729 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.110756 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.110936 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.111351 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.111638 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). 
InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.111938 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.112391 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.113820 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.113835 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.115220 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.115572 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.115936 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.115491 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.115562 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.115715 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.115811 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.115877 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.116069 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.116253 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.116584 4711 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.116650 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-23 08:20:36.616631254 +0000 UTC m=+22.189587622 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.116744 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver 
kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-2
3T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.117484 4711 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.118674 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.119111 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.118711 4711 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.119087 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.119225 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-23 08:20:36.619193478 +0000 UTC m=+22.192149836 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.119364 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.120752 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.121313 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.128459 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.138928 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.165371 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.170075 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.178277 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.178611 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.178637 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.178655 4711 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.178745 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-23 08:20:36.678717994 +0000 UTC m=+22.251674362 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.181131 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.185722 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.185763 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.185779 4711 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.185852 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-23 08:20:36.68582995 +0000 UTC m=+22.258786308 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.189553 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 01:28:52.725685603 +0000 UTC Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.190607 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200011 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200282 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200420 4711 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200487 4711 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200597 4711 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200665 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200698 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200730 4711 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200819 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200833 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200846 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200856 
4711 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200866 4711 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200877 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200888 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200897 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200907 4711 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200916 4711 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200925 4711 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200935 4711 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200944 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200954 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200965 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200975 4711 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 
08:20:36.200987 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200997 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201006 4711 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201015 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201024 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201035 4711 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201046 4711 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201056 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201067 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201078 4711 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201092 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201101 4711 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201112 4711 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 
08:20:36.201122 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201131 4711 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201141 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201151 4711 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201160 4711 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201170 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201179 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201191 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201201 4711 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201210 4711 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201219 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201228 4711 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201237 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc 
kubenswrapper[4711]: I0123 08:20:36.201246 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201256 4711 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201265 4711 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201274 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201283 4711 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201293 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201302 4711 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201311 4711 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201320 4711 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201329 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201338 4711 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201350 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201362 4711 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201374 4711 
reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201386 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201398 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201411 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201422 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201432 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201445 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201455 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201463 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201472 4711 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201481 4711 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201550 4711 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.201562 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.200203 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: 
\"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.205858 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.213797 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.232420 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.254626 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.267087 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.273380 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.279531 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.470478 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.495231 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-cr
c-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.507029 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-gpch6"] Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.507427 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-gpch6" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.510878 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.511183 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.511267 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.511577 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.536481 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.552101 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.565195 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver 
kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23
T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.579460 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.592035 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.604229 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.604337 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99g7g\" (UniqueName: \"kubernetes.io/projected/23cac305-d4a6-4543-9585-c9d46409e12b-kube-api-access-99g7g\") pod \"node-resolver-gpch6\" (UID: \"23cac305-d4a6-4543-9585-c9d46409e12b\") " pod="openshift-dns/node-resolver-gpch6" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.604379 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/23cac305-d4a6-4543-9585-c9d46409e12b-hosts-file\") pod \"node-resolver-gpch6\" (UID: \"23cac305-d4a6-4543-9585-c9d46409e12b\") " pod="openshift-dns/node-resolver-gpch6" Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.604518 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-23 08:20:37.604455949 +0000 UTC m=+23.177412347 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.612245 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.634035 4711 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-23 08:15:35 +0000 UTC, rotation deadline is 2026-10-21 00:29:09.197926756 +0000 UTC Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.634122 4711 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6496h8m32.563807392s for next certificate rotation Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.705920 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.705979 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.706014 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/23cac305-d4a6-4543-9585-c9d46409e12b-hosts-file\") pod \"node-resolver-gpch6\" (UID: \"23cac305-d4a6-4543-9585-c9d46409e12b\") " pod="openshift-dns/node-resolver-gpch6" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.706051 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.706076 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.706100 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-99g7g\" (UniqueName: \"kubernetes.io/projected/23cac305-d4a6-4543-9585-c9d46409e12b-kube-api-access-99g7g\") pod \"node-resolver-gpch6\" (UID: \"23cac305-d4a6-4543-9585-c9d46409e12b\") " pod="openshift-dns/node-resolver-gpch6" Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.706160 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.706222 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.706238 4711 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.706251 4711 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.706310 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-23 08:20:37.706291254 +0000 UTC m=+23.279247622 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.706412 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.706427 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.706432 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-23 08:20:37.706369736 +0000 UTC m=+23.279326304 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.706436 4711 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.706519 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-23 08:20:37.706494329 +0000 UTC m=+23.279450937 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.706518 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/23cac305-d4a6-4543-9585-c9d46409e12b-hosts-file\") pod \"node-resolver-gpch6\" (UID: \"23cac305-d4a6-4543-9585-c9d46409e12b\") " pod="openshift-dns/node-resolver-gpch6" Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.706586 4711 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.706616 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-23 08:20:37.706609471 +0000 UTC m=+23.279565839 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.742325 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.744313 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447"} Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.745076 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.747584 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a"} Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.747605 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81"} Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.747615 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"b2fdff7ac28d51b421a0a435f45fcbdd62826437787ce8c47e2a737f28038363"} Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.749103 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"a3aa4cbcaa2ff0a93c4348ce473f38aff5bcb14468d402d91167923fa05b82de"} Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.750778 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0"} Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.750800 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"fc2033a442fcfb2e9cc70c2b5dd35fe16ee580aa59f9a4eebf751be67da37771"} Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.778188 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.789034 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:36 crc kubenswrapper[4711]: E0123 08:20:36.814839 4711 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.817981 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver 
kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kub
e-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.823856 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99g7g\" (UniqueName: \"kubernetes.io/projected/23cac305-d4a6-4543-9585-c9d46409e12b-kube-api-access-99g7g\") pod \"node-resolver-gpch6\" (UID: \"23cac305-d4a6-4543-9585-c9d46409e12b\") " pod="openshift-dns/node-resolver-gpch6" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.825267 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-gpch6" Jan 23 08:20:36 crc kubenswrapper[4711]: W0123 08:20:36.840107 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod23cac305_d4a6_4543_9585_c9d46409e12b.slice/crio-13aaddad486ae6c0c4ec52fd088971abcc255f78d21d41c88357466a7c069fbc WatchSource:0}: Error finding container 13aaddad486ae6c0c4ec52fd088971abcc255f78d21d41c88357466a7c069fbc: Status 404 returned error can't find the container with id 13aaddad486ae6c0c4ec52fd088971abcc255f78d21d41c88357466a7c069fbc Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.848823 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.865747 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.921102 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.976923 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:36 crc kubenswrapper[4711]: I0123 08:20:36.997671 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.023724 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.066119 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-po
d-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.083290 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.100140 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.114196 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209948
2919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.132460 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.157980 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is 
after 2025-08-24T17:21:41Z" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.174451 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.190716 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 16:38:50.61284446 +0000 UTC Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.194421 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-2t9r8"] Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.195692 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.210563 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhnmh\" (UniqueName: \"kubernetes.io/projected/3846d4e0-cfda-4e0b-8747-85267de12736-kube-api-access-xhnmh\") pod \"machine-config-daemon-2t9r8\" (UID: \"3846d4e0-cfda-4e0b-8747-85267de12736\") " pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.210638 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3846d4e0-cfda-4e0b-8747-85267de12736-rootfs\") pod \"machine-config-daemon-2t9r8\" (UID: \"3846d4e0-cfda-4e0b-8747-85267de12736\") " pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.210808 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3846d4e0-cfda-4e0b-8747-85267de12736-proxy-tls\") pod \"machine-config-daemon-2t9r8\" (UID: \"3846d4e0-cfda-4e0b-8747-85267de12736\") " pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.210858 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3846d4e0-cfda-4e0b-8747-85267de12736-mcd-auth-proxy-config\") pod \"machine-config-daemon-2t9r8\" (UID: \"3846d4e0-cfda-4e0b-8747-85267de12736\") " pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.224999 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.225230 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.225292 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.228472 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.229000 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.229161 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.243988 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.256495 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.271425 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.295429 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.311667 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3846d4e0-cfda-4e0b-8747-85267de12736-proxy-tls\") pod \"machine-config-daemon-2t9r8\" (UID: \"3846d4e0-cfda-4e0b-8747-85267de12736\") " pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.311708 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3846d4e0-cfda-4e0b-8747-85267de12736-mcd-auth-proxy-config\") pod \"machine-config-daemon-2t9r8\" (UID: \"3846d4e0-cfda-4e0b-8747-85267de12736\") " pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.311747 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhnmh\" (UniqueName: \"kubernetes.io/projected/3846d4e0-cfda-4e0b-8747-85267de12736-kube-api-access-xhnmh\") pod \"machine-config-daemon-2t9r8\" (UID: \"3846d4e0-cfda-4e0b-8747-85267de12736\") " pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.311775 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3846d4e0-cfda-4e0b-8747-85267de12736-rootfs\") pod \"machine-config-daemon-2t9r8\" (UID: \"3846d4e0-cfda-4e0b-8747-85267de12736\") " pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.311824 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3846d4e0-cfda-4e0b-8747-85267de12736-rootfs\") pod 
\"machine-config-daemon-2t9r8\" (UID: \"3846d4e0-cfda-4e0b-8747-85267de12736\") " pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.311931 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.313030 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3846d4e0-cfda-4e0b-8747-85267de12736-mcd-auth-proxy-config\") pod \"machine-config-daemon-2t9r8\" (UID: \"3846d4e0-cfda-4e0b-8747-85267de12736\") " pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.317950 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3846d4e0-cfda-4e0b-8747-85267de12736-proxy-tls\") pod \"machine-config-daemon-2t9r8\" (UID: \"3846d4e0-cfda-4e0b-8747-85267de12736\") " pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.329273 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhnmh\" (UniqueName: \"kubernetes.io/projected/3846d4e0-cfda-4e0b-8747-85267de12736-kube-api-access-xhnmh\") pod \"machine-config-daemon-2t9r8\" (UID: \"3846d4e0-cfda-4e0b-8747-85267de12736\") " pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.330569 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.341552 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is 
after 2025-08-24T17:21:41Z" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.355499 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 
08:20:37.374989 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.402107 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.421496 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.443455 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.473762 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.473797 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.473797 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:20:37 crc kubenswrapper[4711]: E0123 08:20:37.473929 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:20:37 crc kubenswrapper[4711]: E0123 08:20:37.474091 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:20:37 crc kubenswrapper[4711]: E0123 08:20:37.474361 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.479535 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.480052 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.480926 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.481706 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.482270 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.482787 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.483434 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.484042 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 23 08:20:37 
crc kubenswrapper[4711]: I0123 08:20:37.486277 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.486845 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.487397 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.488414 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.488945 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.489809 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.490319 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.491237 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.491796 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.492160 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.493078 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.493637 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.494074 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.495014 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.495424 4711 
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.495424 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.496571 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.497052 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.498294 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.499477 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.500421 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.501657 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.502295 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.503356 4711 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.503546 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.505234 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.506226 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.506759 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes"
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.508783 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.510047 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.510735 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.511994 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.512731 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.513278 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.514448 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.515750 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.516456 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.517573 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.518167 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.519210 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.520193 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.521260 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.524212 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.525074 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.527249 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.528271 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.529845 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.622909 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:20:37 crc kubenswrapper[4711]: E0123 08:20:37.623163 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:20:39.623119503 +0000 UTC m=+25.196075871 (durationBeforeRetry 2s). 
Jan 23 08:20:37 crc kubenswrapper[4711]: E0123 08:20:37.623163 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:20:39.623119503 +0000 UTC m=+25.196075871 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.730821 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.730876 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 23 08:20:37 crc kubenswrapper[4711]: E0123 08:20:37.731092 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 23 08:20:37 crc kubenswrapper[4711]: E0123 08:20:37.731118 4711 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Jan 23 08:20:37 crc kubenswrapper[4711]: E0123 08:20:37.731145 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 23 08:20:37 crc kubenswrapper[4711]: E0123 08:20:37.731281 4711 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.731338 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-955cc"]
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:37 crc kubenswrapper[4711]: E0123 08:20:37.731470 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-23 08:20:39.73145616 +0000 UTC m=+25.304412608 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.731526 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.731596 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:20:37 crc kubenswrapper[4711]: E0123 08:20:37.731721 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 23 08:20:37 crc kubenswrapper[4711]: E0123 08:20:37.731736 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 23 08:20:37 crc kubenswrapper[4711]: E0123 08:20:37.731747 4711 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:37 crc kubenswrapper[4711]: E0123 08:20:37.731757 4711 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 23 08:20:37 crc kubenswrapper[4711]: E0123 08:20:37.731788 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-23 08:20:39.731778658 +0000 UTC m=+25.304735106 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:37 crc kubenswrapper[4711]: E0123 08:20:37.731829 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-23 08:20:39.731806708 +0000 UTC m=+25.304763276 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.732102 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-vpxkq"] Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.732277 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-jmffw"] Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.732286 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-955cc" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.732990 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.733495 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.757769 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerStarted","Data":"4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5"} Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.757827 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerStarted","Data":"2d3835f7c6119aff96658bdb7b1b81c30d7d2067fb93ef5c12b4e34a86c694c0"} Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.759861 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-gpch6" event={"ID":"23cac305-d4a6-4543-9585-c9d46409e12b","Type":"ContainerStarted","Data":"9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8"} Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.759891 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-gpch6" event={"ID":"23cac305-d4a6-4543-9585-c9d46409e12b","Type":"ContainerStarted","Data":"13aaddad486ae6c0c4ec52fd088971abcc255f78d21d41c88357466a7c069fbc"} Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.776045 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.779720 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.780040 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.780083 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.780423 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.781155 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.781339 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.781380 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.781547 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.781644 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.781663 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.781740 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 23 08:20:37 
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.782056 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.800241 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832041 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-host-run-netns\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832101 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-cni-netd\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832125 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-run-netns\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832148 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-host-run-k8s-cni-cncf-io\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832175 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832195 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e16bfd0e-30fd-4fcf-865b-63400b88cff3-env-overrides\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832213 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsn7w\" (UniqueName: \"kubernetes.io/projected/90368f73-4e1c-477c-a507-30d6108ac6a1-kube-api-access-gsn7w\") pod \"multus-additional-cni-plugins-955cc\" (UID: \"90368f73-4e1c-477c-a507-30d6108ac6a1\") " pod="openshift-multus/multus-additional-cni-plugins-955cc"
\"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832264 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pk2h\" (UniqueName: \"kubernetes.io/projected/8cc803a0-2626-4444-b4b2-8e9567277d44-kube-api-access-4pk2h\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832285 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-var-lib-openvswitch\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832320 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-node-log\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832338 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e16bfd0e-30fd-4fcf-865b-63400b88cff3-ovn-node-metrics-cert\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832356 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e16bfd0e-30fd-4fcf-865b-63400b88cff3-ovnkube-script-lib\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832373 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/90368f73-4e1c-477c-a507-30d6108ac6a1-system-cni-dir\") pod \"multus-additional-cni-plugins-955cc\" (UID: \"90368f73-4e1c-477c-a507-30d6108ac6a1\") " pod="openshift-multus/multus-additional-cni-plugins-955cc" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832390 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8cc803a0-2626-4444-b4b2-8e9567277d44-cni-binary-copy\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832408 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-system-cni-dir\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832430 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: 
\"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-host-run-multus-certs\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832450 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/90368f73-4e1c-477c-a507-30d6108ac6a1-cni-binary-copy\") pod \"multus-additional-cni-plugins-955cc\" (UID: \"90368f73-4e1c-477c-a507-30d6108ac6a1\") " pod="openshift-multus/multus-additional-cni-plugins-955cc" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832473 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-host-var-lib-cni-bin\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832494 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/8cc803a0-2626-4444-b4b2-8e9567277d44-multus-daemon-config\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832604 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-systemd-units\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832646 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-run-systemd\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832681 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-cni-bin\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832709 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-multus-socket-dir-parent\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832735 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-host-var-lib-cni-multus\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832768 4711 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-kubelet\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832783 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-run-ovn-kubernetes\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832799 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/90368f73-4e1c-477c-a507-30d6108ac6a1-os-release\") pod \"multus-additional-cni-plugins-955cc\" (UID: \"90368f73-4e1c-477c-a507-30d6108ac6a1\") " pod="openshift-multus/multus-additional-cni-plugins-955cc" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832816 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-log-socket\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.832969 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-multus-cni-dir\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.833009 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e16bfd0e-30fd-4fcf-865b-63400b88cff3-ovnkube-config\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.833030 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/90368f73-4e1c-477c-a507-30d6108ac6a1-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-955cc\" (UID: \"90368f73-4e1c-477c-a507-30d6108ac6a1\") " pod="openshift-multus/multus-additional-cni-plugins-955cc" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.833181 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-cnibin\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.833253 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-multus-conf-dir\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc 
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.833336 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-os-release\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.833388 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/90368f73-4e1c-477c-a507-30d6108ac6a1-cnibin\") pod \"multus-additional-cni-plugins-955cc\" (UID: \"90368f73-4e1c-477c-a507-30d6108ac6a1\") " pod="openshift-multus/multus-additional-cni-plugins-955cc"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.833464 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-host-var-lib-kubelet\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.833525 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-slash\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.833562 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-etc-openvswitch\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.833623 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-run-openvswitch\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.833671 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-hostroot\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.833701 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-run-ovn\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.833732 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mppnf\" (UniqueName: \"kubernetes.io/projected/e16bfd0e-30fd-4fcf-865b-63400b88cff3-kube-api-access-mppnf\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.833777 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/90368f73-4e1c-477c-a507-30d6108ac6a1-tuning-conf-dir\") pod \"multus-additional-cni-plugins-955cc\" (UID: \"90368f73-4e1c-477c-a507-30d6108ac6a1\") " pod="openshift-multus/multus-additional-cni-plugins-955cc"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.838824 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.879927 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.906229 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.929098 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.935742 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-system-cni-dir\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.935802 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-host-run-multus-certs\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.935823 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/90368f73-4e1c-477c-a507-30d6108ac6a1-cni-binary-copy\") pod \"multus-additional-cni-plugins-955cc\" (UID: \"90368f73-4e1c-477c-a507-30d6108ac6a1\") " pod="openshift-multus/multus-additional-cni-plugins-955cc"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.935862 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-host-var-lib-cni-bin\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.935882 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/8cc803a0-2626-4444-b4b2-8e9567277d44-multus-daemon-config\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.935900 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-systemd-units\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.935916 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-run-systemd\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw"
Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.935970 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-cni-bin\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw"
pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936021 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-host-var-lib-cni-multus\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936039 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-kubelet\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936055 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-run-ovn-kubernetes\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936069 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/90368f73-4e1c-477c-a507-30d6108ac6a1-os-release\") pod \"multus-additional-cni-plugins-955cc\" (UID: \"90368f73-4e1c-477c-a507-30d6108ac6a1\") " pod="openshift-multus/multus-additional-cni-plugins-955cc" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936108 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-log-socket\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936141 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-multus-cni-dir\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936183 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e16bfd0e-30fd-4fcf-865b-63400b88cff3-ovnkube-config\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936205 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/90368f73-4e1c-477c-a507-30d6108ac6a1-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-955cc\" (UID: \"90368f73-4e1c-477c-a507-30d6108ac6a1\") " pod="openshift-multus/multus-additional-cni-plugins-955cc" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936224 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-cnibin\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 
08:20:37.936257 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-multus-conf-dir\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936280 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-os-release\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936297 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/90368f73-4e1c-477c-a507-30d6108ac6a1-cnibin\") pod \"multus-additional-cni-plugins-955cc\" (UID: \"90368f73-4e1c-477c-a507-30d6108ac6a1\") " pod="openshift-multus/multus-additional-cni-plugins-955cc" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936335 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-host-var-lib-kubelet\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936351 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-slash\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936368 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-etc-openvswitch\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936384 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-run-openvswitch\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936418 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-hostroot\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936422 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-kubelet\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936436 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-run-ovn\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936495 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-run-ovn\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936500 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mppnf\" (UniqueName: \"kubernetes.io/projected/e16bfd0e-30fd-4fcf-865b-63400b88cff3-kube-api-access-mppnf\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936567 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/90368f73-4e1c-477c-a507-30d6108ac6a1-tuning-conf-dir\") pod \"multus-additional-cni-plugins-955cc\" (UID: \"90368f73-4e1c-477c-a507-30d6108ac6a1\") " pod="openshift-multus/multus-additional-cni-plugins-955cc" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936885 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-host-run-netns\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936946 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-cni-netd\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936983 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-run-netns\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.937021 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-host-run-k8s-cni-cncf-io\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.937068 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.937113 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/e16bfd0e-30fd-4fcf-865b-63400b88cff3-env-overrides\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.937149 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsn7w\" (UniqueName: \"kubernetes.io/projected/90368f73-4e1c-477c-a507-30d6108ac6a1-kube-api-access-gsn7w\") pod \"multus-additional-cni-plugins-955cc\" (UID: \"90368f73-4e1c-477c-a507-30d6108ac6a1\") " pod="openshift-multus/multus-additional-cni-plugins-955cc" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.937141 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/90368f73-4e1c-477c-a507-30d6108ac6a1-os-release\") pod \"multus-additional-cni-plugins-955cc\" (UID: \"90368f73-4e1c-477c-a507-30d6108ac6a1\") " pod="openshift-multus/multus-additional-cni-plugins-955cc" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.937179 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-run-openvswitch\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.937263 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-etc-kubernetes\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.937274 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-log-socket\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.937295 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-etc-openvswitch\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.937159 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-os-release\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.937209 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-etc-kubernetes\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.937310 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-slash\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.937370 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pk2h\" (UniqueName: \"kubernetes.io/projected/8cc803a0-2626-4444-b4b2-8e9567277d44-kube-api-access-4pk2h\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.937330 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-cni-bin\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.937394 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-multus-socket-dir-parent\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.937375 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-cnibin\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.936569 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-run-ovn-kubernetes\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.937185 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/90368f73-4e1c-477c-a507-30d6108ac6a1-cnibin\") pod \"multus-additional-cni-plugins-955cc\" (UID: \"90368f73-4e1c-477c-a507-30d6108ac6a1\") " pod="openshift-multus/multus-additional-cni-plugins-955cc" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.937961 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-host-run-multus-certs\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.938405 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-host-run-netns\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.938437 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-host-var-lib-cni-bin\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.938454 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" 
(UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-host-var-lib-cni-multus\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.938427 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-hostroot\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.938478 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-multus-conf-dir\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.938495 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.938538 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-systemd-units\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.938542 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e16bfd0e-30fd-4fcf-865b-63400b88cff3-ovnkube-config\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.938566 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-host-run-k8s-cni-cncf-io\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.938568 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-var-lib-openvswitch\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.938597 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-var-lib-openvswitch\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.938624 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-run-netns\") pod \"ovnkube-node-jmffw\" (UID: 
\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.938666 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-node-log\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.938704 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e16bfd0e-30fd-4fcf-865b-63400b88cff3-ovn-node-metrics-cert\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.938707 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/90368f73-4e1c-477c-a507-30d6108ac6a1-tuning-conf-dir\") pod \"multus-additional-cni-plugins-955cc\" (UID: \"90368f73-4e1c-477c-a507-30d6108ac6a1\") " pod="openshift-multus/multus-additional-cni-plugins-955cc" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.938709 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-cni-netd\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.938708 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-system-cni-dir\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.938783 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-node-log\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.938932 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-multus-cni-dir\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.938983 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/8cc803a0-2626-4444-b4b2-8e9567277d44-host-var-lib-kubelet\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.939021 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e16bfd0e-30fd-4fcf-865b-63400b88cff3-ovnkube-script-lib\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.939086 4711 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/90368f73-4e1c-477c-a507-30d6108ac6a1-system-cni-dir\") pod \"multus-additional-cni-plugins-955cc\" (UID: \"90368f73-4e1c-477c-a507-30d6108ac6a1\") " pod="openshift-multus/multus-additional-cni-plugins-955cc" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.939109 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8cc803a0-2626-4444-b4b2-8e9567277d44-cni-binary-copy\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.939221 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/90368f73-4e1c-477c-a507-30d6108ac6a1-system-cni-dir\") pod \"multus-additional-cni-plugins-955cc\" (UID: \"90368f73-4e1c-477c-a507-30d6108ac6a1\") " pod="openshift-multus/multus-additional-cni-plugins-955cc" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.939855 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8cc803a0-2626-4444-b4b2-8e9567277d44-cni-binary-copy\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.939927 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/8cc803a0-2626-4444-b4b2-8e9567277d44-multus-daemon-config\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.940308 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e16bfd0e-30fd-4fcf-865b-63400b88cff3-env-overrides\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.940586 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e16bfd0e-30fd-4fcf-865b-63400b88cff3-ovnkube-script-lib\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.941315 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-run-systemd\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.942788 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/90368f73-4e1c-477c-a507-30d6108ac6a1-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-955cc\" (UID: \"90368f73-4e1c-477c-a507-30d6108ac6a1\") " pod="openshift-multus/multus-additional-cni-plugins-955cc" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.944162 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/90368f73-4e1c-477c-a507-30d6108ac6a1-cni-binary-copy\") pod \"multus-additional-cni-plugins-955cc\" (UID: \"90368f73-4e1c-477c-a507-30d6108ac6a1\") " pod="openshift-multus/multus-additional-cni-plugins-955cc" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.953996 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e16bfd0e-30fd-4fcf-865b-63400b88cff3-ovn-node-metrics-cert\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.964669 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsn7w\" (UniqueName: \"kubernetes.io/projected/90368f73-4e1c-477c-a507-30d6108ac6a1-kube-api-access-gsn7w\") pod \"multus-additional-cni-plugins-955cc\" (UID: \"90368f73-4e1c-477c-a507-30d6108ac6a1\") " pod="openshift-multus/multus-additional-cni-plugins-955cc" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.965955 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.966753 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pk2h\" (UniqueName: \"kubernetes.io/projected/8cc803a0-2626-4444-b4b2-8e9567277d44-kube-api-access-4pk2h\") pod \"multus-vpxkq\" (UID: \"8cc803a0-2626-4444-b4b2-8e9567277d44\") " pod="openshift-multus/multus-vpxkq" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.971251 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mppnf\" (UniqueName: \"kubernetes.io/projected/e16bfd0e-30fd-4fcf-865b-63400b88cff3-kube-api-access-mppnf\") pod \"ovnkube-node-jmffw\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.980963 4711 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:37 crc kubenswrapper[4711]: I0123 08:20:37.998479 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:37Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.024794 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.038745 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.052540 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-955cc" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.057371 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.072464 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.086257 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.087805 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-vpxkq" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.134841 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.135673 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\
":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.160107 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.187496 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.192184 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 16:10:58.664707789 +0000 UTC Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.209986 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.225417 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.312713 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.427144 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.548526 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: 
I0123 08:20:38.589209 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.614340 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.637497 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}
],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.765823 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vpxkq" event={"ID":"8cc803a0-2626-4444-b4b2-8e9567277d44","Type":"ContainerStarted","Data":"e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6"} Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.765891 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vpxkq" event={"ID":"8cc803a0-2626-4444-b4b2-8e9567277d44","Type":"ContainerStarted","Data":"d56034f2b808014b41c45fc704e460c0332d896e2cb717e003e624a3a769aeb3"} Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.769722 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerStarted","Data":"a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed"} Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.771353 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" event={"ID":"90368f73-4e1c-477c-a507-30d6108ac6a1","Type":"ContainerStarted","Data":"b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409"} Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.771417 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" event={"ID":"90368f73-4e1c-477c-a507-30d6108ac6a1","Type":"ContainerStarted","Data":"587b787c005d71044c539a0ddf2b81623406b01d61f179daa88683424fb57533"} Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.776696 4711 generic.go:334] "Generic (PLEG): container finished" podID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerID="ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc" exitCode=0 Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.776855 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerDied","Data":"ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc"} Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.776935 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerStarted","Data":"e6df4e19257cce69b10f5993d09781450de547988257ce794e0be35c6992f898"} Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.784708 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.804955 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.830991 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.847774 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.862376 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.876806 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.888592 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.904884 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.922289 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.942105 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: 
I0123 08:20:38.955991 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.975056 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:38 crc kubenswrapper[4711]: I0123 08:20:38.991020 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:38Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.005631 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.021193 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.041421 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.060298 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.078365 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.093267 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.115022 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z 
is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.137777 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.152601 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.170530 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.188162 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.192874 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 11:15:07.032262755 +0000 UTC Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.200753 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.211473 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.475230 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:20:39 crc kubenswrapper[4711]: E0123 08:20:39.475623 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.476049 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:20:39 crc kubenswrapper[4711]: E0123 08:20:39.476108 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.476144 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:20:39 crc kubenswrapper[4711]: E0123 08:20:39.476189 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.664417 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:20:39 crc kubenswrapper[4711]: E0123 08:20:39.664752 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:20:43.66473059 +0000 UTC m=+29.237686958 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.765781 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.765845 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.765870 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.765896 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:20:39 crc kubenswrapper[4711]: E0123 08:20:39.765984 4711 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 23 08:20:39 crc kubenswrapper[4711]: E0123 08:20:39.766061 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-23 08:20:43.766041952 +0000 UTC m=+29.338998310 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 23 08:20:39 crc kubenswrapper[4711]: E0123 08:20:39.766190 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 23 08:20:39 crc kubenswrapper[4711]: E0123 08:20:39.766227 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 23 08:20:39 crc kubenswrapper[4711]: E0123 08:20:39.766230 4711 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 23 08:20:39 crc kubenswrapper[4711]: E0123 08:20:39.766320 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 23 08:20:39 crc kubenswrapper[4711]: E0123 08:20:39.766367 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 23 08:20:39 crc kubenswrapper[4711]: E0123 08:20:39.766244 4711 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:39 crc kubenswrapper[4711]: E0123 08:20:39.766347 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-23 08:20:43.766323639 +0000 UTC m=+29.339279997 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 23 08:20:39 crc kubenswrapper[4711]: E0123 08:20:39.766441 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-23 08:20:43.766434281 +0000 UTC m=+29.339390649 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:39 crc kubenswrapper[4711]: E0123 08:20:39.766378 4711 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:39 crc kubenswrapper[4711]: E0123 08:20:39.766482 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-23 08:20:43.766474542 +0000 UTC m=+29.339431130 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.784255 4711 generic.go:334] "Generic (PLEG): container finished" podID="90368f73-4e1c-477c-a507-30d6108ac6a1" containerID="b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409" exitCode=0 Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.784363 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" event={"ID":"90368f73-4e1c-477c-a507-30d6108ac6a1","Type":"ContainerDied","Data":"b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409"} Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.789836 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerStarted","Data":"34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f"} Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.789885 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerStarted","Data":"65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154"} Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.807590 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.808713 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.824883 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.826406 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.832623 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.843490 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc 
kubenswrapper[4711]: I0123 08:20:39.854311 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.873903 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.889907 4711 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.913348 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.929184 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.942845 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.981491 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z 
is after 2025-08-24T17:21:41Z" Jan 23 08:20:39 crc kubenswrapper[4711]: I0123 08:20:39.999389 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:39Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.023930 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.045620 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.073463 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.101634 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.126749 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\
\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.193408 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 18:55:48.899540901 +0000 UTC Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.197195 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963
219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.215133 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.235852 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z 
is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.273569 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.306813 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.326732 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.361044 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.382080 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.402520 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c85
7df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.413098 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.426621 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.492677 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-bkn9c"] Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.493140 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-bkn9c" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.494958 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.495491 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.495691 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.496279 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.507405 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.524243 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc 
kubenswrapper[4711]: I0123 08:20:40.546261 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\
"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.556889 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.566452 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.577044 4711 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/80227686-5007-41d4-8d57-bcedb7564f78-serviceca\") pod \"node-ca-bkn9c\" (UID: \"80227686-5007-41d4-8d57-bcedb7564f78\") " pod="openshift-image-registry/node-ca-bkn9c" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.577110 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whkp6\" (UniqueName: \"kubernetes.io/projected/80227686-5007-41d4-8d57-bcedb7564f78-kube-api-access-whkp6\") pod \"node-ca-bkn9c\" (UID: \"80227686-5007-41d4-8d57-bcedb7564f78\") " pod="openshift-image-registry/node-ca-bkn9c" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.577292 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/80227686-5007-41d4-8d57-bcedb7564f78-host\") pod \"node-ca-bkn9c\" (UID: \"80227686-5007-41d4-8d57-bcedb7564f78\") " pod="openshift-image-registry/node-ca-bkn9c" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.577723 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\
\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.595567 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963
219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.611681 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.625441 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.638494 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.650427 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.663352 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.678242 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/80227686-5007-41d4-8d57-bcedb7564f78-host\") pod \"node-ca-bkn9c\" (UID: \"80227686-5007-41d4-8d57-bcedb7564f78\") " pod="openshift-image-registry/node-ca-bkn9c" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.678323 4711 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/80227686-5007-41d4-8d57-bcedb7564f78-serviceca\") pod \"node-ca-bkn9c\" (UID: \"80227686-5007-41d4-8d57-bcedb7564f78\") " pod="openshift-image-registry/node-ca-bkn9c" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.678381 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whkp6\" (UniqueName: \"kubernetes.io/projected/80227686-5007-41d4-8d57-bcedb7564f78-kube-api-access-whkp6\") pod \"node-ca-bkn9c\" (UID: \"80227686-5007-41d4-8d57-bcedb7564f78\") " pod="openshift-image-registry/node-ca-bkn9c" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.678369 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.678575 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/80227686-5007-41d4-8d57-bcedb7564f78-host\") pod \"node-ca-bkn9c\" (UID: \"80227686-5007-41d4-8d57-bcedb7564f78\") " pod="openshift-image-registry/node-ca-bkn9c" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.679596 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/80227686-5007-41d4-8d57-bcedb7564f78-serviceca\") pod \"node-ca-bkn9c\" (UID: \"80227686-5007-41d4-8d57-bcedb7564f78\") " pod="openshift-image-registry/node-ca-bkn9c" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.701919 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z 
is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.704210 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whkp6\" (UniqueName: \"kubernetes.io/projected/80227686-5007-41d4-8d57-bcedb7564f78-kube-api-access-whkp6\") pod \"node-ca-bkn9c\" (UID: \"80227686-5007-41d4-8d57-bcedb7564f78\") " pod="openshift-image-registry/node-ca-bkn9c" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.711192 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.795000 4711 generic.go:334] "Generic (PLEG): container finished" podID="90368f73-4e1c-477c-a507-30d6108ac6a1" containerID="fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80" exitCode=0 Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.795093 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" 
event={"ID":"90368f73-4e1c-477c-a507-30d6108ac6a1","Type":"ContainerDied","Data":"fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80"} Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.796943 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70"} Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.803674 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerStarted","Data":"a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649"} Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.803737 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerStarted","Data":"37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d"} Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.803756 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerStarted","Data":"66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5"} Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.803766 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerStarted","Data":"075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289"} Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.808950 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-bkn9c" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.817196 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.832125 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: W0123 08:20:40.841066 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod80227686_5007_41d4_8d57_bcedb7564f78.slice/crio-41d2dc2f54e4ab57b92114e891e42f782d838651db0db0cc632134871ddb15bb WatchSource:0}: Error finding container 41d2dc2f54e4ab57b92114e891e42f782d838651db0db0cc632134871ddb15bb: Status 404 returned error can't find the container with id 41d2dc2f54e4ab57b92114e891e42f782d838651db0db0cc632134871ddb15bb Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.845685 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.862651 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.874743 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.895420 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node 
kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni
/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.907866 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.927872 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":
{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.942446 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\
\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.956095 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"
192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.970195 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\
":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:40 crc kubenswrapper[4711]: I0123 08:20:40.993944 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb
2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:40Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.041779 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963
219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.075754 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.117016 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.153193 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.193746 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 16:04:59.151277716 +0000 UTC Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.195177 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.245272 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.273985 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.317935 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z 
is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.351978 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.397716 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.444656 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.473082 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:20:41 crc kubenswrapper[4711]: E0123 08:20:41.473254 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.473689 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.473733 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:20:41 crc kubenswrapper[4711]: E0123 08:20:41.473812 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:20:41 crc kubenswrapper[4711]: E0123 08:20:41.473914 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.476451 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.513468 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.556606 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.596255 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"moun
tPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.635053 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.672197 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.711398 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.811550 4711 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-image-registry/node-ca-bkn9c" event={"ID":"80227686-5007-41d4-8d57-bcedb7564f78","Type":"ContainerStarted","Data":"31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933"} Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.811991 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-bkn9c" event={"ID":"80227686-5007-41d4-8d57-bcedb7564f78","Type":"ContainerStarted","Data":"41d2dc2f54e4ab57b92114e891e42f782d838651db0db0cc632134871ddb15bb"} Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.814935 4711 generic.go:334] "Generic (PLEG): container finished" podID="90368f73-4e1c-477c-a507-30d6108ac6a1" containerID="49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a" exitCode=0 Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.815013 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" event={"ID":"90368f73-4e1c-477c-a507-30d6108ac6a1","Type":"ContainerDied","Data":"49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a"} Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.834983 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.851029 4711 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.853194 4711 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.855629 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.855691 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.855707 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.855839 4711 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.867218 4711 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.867620 4711 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 23 08:20:41 crc 
kubenswrapper[4711]: I0123 08:20:41.867764 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\
":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.871410 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.871468 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.871486 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.871528 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.871550 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:41Z","lastTransitionTime":"2026-01-23T08:20:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:41 crc kubenswrapper[4711]: E0123 08:20:41.890070 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.898744 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.898800 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.898810 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.898829 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.898840 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:41Z","lastTransitionTime":"2026-01-23T08:20:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:41 crc kubenswrapper[4711]: E0123 08:20:41.913824 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.920043 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.920080 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.920089 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.920104 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.920114 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:41Z","lastTransitionTime":"2026-01-23T08:20:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.922767 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Complet
ed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: E0123 08:20:41.934854 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.939289 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.939346 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.939363 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.939414 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.939454 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:41Z","lastTransitionTime":"2026-01-23T08:20:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:41 crc kubenswrapper[4711]: E0123 08:20:41.953224 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.954385 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.957921 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.958002 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.958015 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.958037 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.958049 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:41Z","lastTransitionTime":"2026-01-23T08:20:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:41 crc kubenswrapper[4711]: E0123 08:20:41.973091 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:41 crc kubenswrapper[4711]: E0123 08:20:41.973245 4711 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.975047 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
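The repeated "Error updating node status, will retry" entries culminating in "update node status exceeds retry count" above reflect the kubelet's bounded retry around node status syncs (the upstream source bounds this with the constant nodeStatusUpdateRetry). A minimal sketch of that pattern follows; the bound of 5 and the stand-in patch function, which always fails the way the webhook does here, are assumptions for illustration, not the kubelet's actual code.

    package main

    import (
    	"errors"
    	"fmt"
    )

    // Assumed bound for illustration; the upstream kubelet uses a small fixed
    // constant named nodeStatusUpdateRetry for the same purpose.
    const nodeStatusUpdateRetry = 5

    // patchNodeStatus stands in for the PATCH request that the admission
    // webhook rejects in the log above; it always fails, as it would while
    // the webhook's serving certificate remains expired.
    func patchNodeStatus() error {
    	return errors.New(`failed calling webhook "node.network-node-identity.openshift.io": certificate has expired`)
    }

    // updateNodeStatus retries the patch up to the bound, then gives up with
    // the same "exceeds retry count" wording seen in the log.
    func updateNodeStatus() error {
    	for i := 0; i < nodeStatusUpdateRetry; i++ {
    		if err := patchNodeStatus(); err != nil {
    			fmt.Printf("Error updating node status, will retry: %v\n", err)
    			continue
    		}
    		return nil
    	}
    	return errors.New("update node status exceeds retry count")
    }

    func main() {
    	if err := updateNodeStatus(); err != nil {
    		fmt.Println(err)
    	}
    }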
event="NodeHasSufficientMemory" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.975085 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.975098 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.975118 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:41 crc kubenswrapper[4711]: I0123 08:20:41.975132 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:41Z","lastTransitionTime":"2026-01-23T08:20:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.000561 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:41Z 
is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.035918 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.073190 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.078295 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.078328 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.078339 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.078358 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.078369 4711 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:42Z","lastTransitionTime":"2026-01-23T08:20:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.116469 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.153125 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.181216 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.181293 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.181322 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.181347 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.181361 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:42Z","lastTransitionTime":"2026-01-23T08:20:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.193997 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 10:30:00.167004621 +0000 UTC Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.194879 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"k
ube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.232560 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.276258 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"moun
tPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.284483 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.284565 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.284578 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.284600 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.284615 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:42Z","lastTransitionTime":"2026-01-23T08:20:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.310924 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.355718 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.387880 4711 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.387946 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.387960 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.387986 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.388001 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:42Z","lastTransitionTime":"2026-01-23T08:20:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.395346 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.436696 4711 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e55
68fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\
":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.475245 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.490485 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.490551 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.490564 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.490584 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.490597 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:42Z","lastTransitionTime":"2026-01-23T08:20:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.518951 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.556091 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.593434 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-
kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.594200 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.594253 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.594265 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.594285 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.594297 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:42Z","lastTransitionTime":"2026-01-23T08:20:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
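
Annotation (not part of the captured log): the err= payloads in the status_manager.go:875 entries above and below are Kubernetes strategic merge patches; the $setElementOrder/conditions directive pins the order of the merge-keyed status.conditions list while only changed fields are sent. A minimal sketch of how such a patch can be generated with the upstream apimachinery helper follows; the Pod literals are illustrative (not taken from this log) and the program assumes the k8s.io/api and k8s.io/apimachinery modules are available.

package main

import (
	"encoding/json"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/strategicpatch"
)

func main() {
	// Previous and updated status for the same pod; only Ready flips.
	oldPod := corev1.Pod{Status: corev1.PodStatus{Conditions: []corev1.PodCondition{
		{Type: corev1.PodReady, Status: corev1.ConditionFalse},
		{Type: corev1.PodScheduled, Status: corev1.ConditionTrue},
	}}}
	newPod := corev1.Pod{Status: corev1.PodStatus{Conditions: []corev1.PodCondition{
		{Type: corev1.PodReady, Status: corev1.ConditionTrue},
		{Type: corev1.PodScheduled, Status: corev1.ConditionTrue},
	}}}

	oldJSON, _ := json.Marshal(oldPod)
	newJSON, _ := json.Marshal(newPod)

	// For merge-keyed lists such as status.conditions the helper produces
	// $setElementOrder ordering directives of the kind quoted in these entries.
	patch, err := strategicpatch.CreateTwoWayMergePatch(oldJSON, newJSON, corev1.Pod{})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(patch))
}
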
Has your network provider started?"} Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.645885 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.676814 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.696876 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.696938 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.696954 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.696972 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.697243 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:42Z","lastTransitionTime":"2026-01-23T08:20:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.712104 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.756268 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.792486 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.800060 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.800105 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.800115 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.800133 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.800144 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:42Z","lastTransitionTime":"2026-01-23T08:20:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
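
Annotation (not part of the captured log): the recurring NodeNotReady condition is the other face of the same outage: kubelet keeps the runtime network not-ready until a CNI configuration file appears in /etc/kubernetes/cni/net.d/ (the directory named in the message). The sketch below is a loose approximation of the scan libcni performs over that directory, assuming the standard .conf/.conflist/.json extensions and skipping the content validation the real library also does.

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"sort"
)

// confFiles loosely mimics libcni's scan of a CNI configuration directory.
func confFiles(dir string) ([]string, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return nil, err
	}
	var files []string
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			files = append(files, filepath.Join(dir, e.Name()))
		}
	}
	sort.Strings(files) // the lexicographically first config wins
	return files, nil
}

func main() {
	files, err := confFiles("/etc/kubernetes/cni/net.d")
	if err != nil || len(files) == 0 {
		fmt.Println("network not ready: no CNI configuration file found:", err)
		return
	}
	fmt.Println("network ready, would use:", files[0])
}
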
Has your network provider started?"}
Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.823194 4711 generic.go:334] "Generic (PLEG): container finished" podID="90368f73-4e1c-477c-a507-30d6108ac6a1" containerID="71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b" exitCode=0
Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.823286 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" event={"ID":"90368f73-4e1c-477c-a507-30d6108ac6a1","Type":"ContainerDied","Data":"71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b"}
Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.830473 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerStarted","Data":"793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365"}
Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.838432 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mo
untPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.874102 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.903384 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.903422 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.903431 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.903446 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.903457 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:42Z","lastTransitionTime":"2026-01-23T08:20:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.918337 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7
ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.954457 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:42 crc kubenswrapper[4711]: I0123 08:20:42.992733 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:42Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.006864 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:43 crc 
kubenswrapper[4711]: I0123 08:20:43.006902 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.006916 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.006935 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.006948 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:43Z","lastTransitionTime":"2026-01-23T08:20:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.032021 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.092035 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"et
cd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\
\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.109800 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.109838 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.109847 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.109863 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.109876 4711 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:43Z","lastTransitionTime":"2026-01-23T08:20:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.120455 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.157860 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.194418 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 05:29:05.444925459 +0000 UTC Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.197764 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.214245 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.214321 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.214337 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.214361 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.214375 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:43Z","lastTransitionTime":"2026-01-23T08:20:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.234854 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.275728 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.317571 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.317620 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.317632 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.317651 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.317664 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:43Z","lastTransitionTime":"2026-01-23T08:20:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.323388 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7
ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.354450 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.393600 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:2
0:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 
genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.420095 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.420145 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.420157 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.420177 4711 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.420190 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:43Z","lastTransitionTime":"2026-01-23T08:20:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.432789 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.473764 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.473841 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:20:43 crc kubenswrapper[4711]: E0123 08:20:43.473992 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.474024 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:20:43 crc kubenswrapper[4711]: E0123 08:20:43.474596 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:20:43 crc kubenswrapper[4711]: E0123 08:20:43.474673 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.476299 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.516211 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8
s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.525036 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.525086 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.525097 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.525119 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.525134 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:43Z","lastTransitionTime":"2026-01-23T08:20:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.553208 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.627586 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.627644 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.627655 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.627676 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.627691 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:43Z","lastTransitionTime":"2026-01-23T08:20:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.711521 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:20:43 crc kubenswrapper[4711]: E0123 08:20:43.711702 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:20:51.711663053 +0000 UTC m=+37.284619421 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.731201 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.731237 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.731251 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.731268 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.731278 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:43Z","lastTransitionTime":"2026-01-23T08:20:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.812742 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.812814 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.812852 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.812882 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:20:43 crc kubenswrapper[4711]: E0123 08:20:43.813021 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 23 08:20:43 crc kubenswrapper[4711]: E0123 08:20:43.813034 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 23 08:20:43 crc kubenswrapper[4711]: E0123 08:20:43.813086 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 23 08:20:43 crc kubenswrapper[4711]: E0123 08:20:43.813109 4711 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:43 crc kubenswrapper[4711]: E0123 08:20:43.813104 4711 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 23 08:20:43 crc kubenswrapper[4711]: E0123 08:20:43.813042 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 23 08:20:43 crc kubenswrapper[4711]: E0123 08:20:43.813211 4711 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod 
openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:43 crc kubenswrapper[4711]: E0123 08:20:43.813194 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-23 08:20:51.813163729 +0000 UTC m=+37.386120137 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:43 crc kubenswrapper[4711]: E0123 08:20:43.813269 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-23 08:20:51.813253441 +0000 UTC m=+37.386209809 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 23 08:20:43 crc kubenswrapper[4711]: E0123 08:20:43.813284 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-23 08:20:51.813276862 +0000 UTC m=+37.386233230 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:43 crc kubenswrapper[4711]: E0123 08:20:43.813404 4711 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 23 08:20:43 crc kubenswrapper[4711]: E0123 08:20:43.813492 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-23 08:20:51.813463657 +0000 UTC m=+37.386420225 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.834032 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.834401 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.834517 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.834650 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.834718 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:43Z","lastTransitionTime":"2026-01-23T08:20:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.837406 4711 generic.go:334] "Generic (PLEG): container finished" podID="90368f73-4e1c-477c-a507-30d6108ac6a1" containerID="89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80" exitCode=0 Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.837461 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" event={"ID":"90368f73-4e1c-477c-a507-30d6108ac6a1","Type":"ContainerDied","Data":"89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80"} Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.857330 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.873784 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.889280 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.905083 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.921675 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.936876 4711 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aa
b6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z"
Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.937967 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.938034 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.938045 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.938067 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.938078 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:43Z","lastTransitionTime":"2026-01-23T08:20:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.958351 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.973661 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:43 crc kubenswrapper[4711]: I0123 08:20:43.991203 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:43Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.009675 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.025036 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:44Z is after 2025-08-24T17:21:41Z"
Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.040431 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.040479 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.040491 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.040532 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.040548 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:44Z","lastTransitionTime":"2026-01-23T08:20:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.040555 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.072911 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.116670 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging 
kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/ru
n/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mount
Path\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-23T08:20:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.144279 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.144330 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.144339 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.144356 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.144367 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:44Z","lastTransitionTime":"2026-01-23T08:20:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.152109 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[
{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.195314 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 12:29:23.672997802 +0000 UTC Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.246834 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.246932 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.246951 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.246986 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.247007 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:44Z","lastTransitionTime":"2026-01-23T08:20:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.349851 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.349908 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.349922 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.349942 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.349977 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:44Z","lastTransitionTime":"2026-01-23T08:20:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.457400 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.457450 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.457469 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.457490 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.457530 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:44Z","lastTransitionTime":"2026-01-23T08:20:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.560397 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.560947 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.560959 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.560978 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.560993 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:44Z","lastTransitionTime":"2026-01-23T08:20:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.664113 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.664171 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.664185 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.664207 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.664222 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:44Z","lastTransitionTime":"2026-01-23T08:20:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.747431 4711 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.766237 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.766280 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.766290 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.766305 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.766314 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:44Z","lastTransitionTime":"2026-01-23T08:20:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.848766 4711 generic.go:334] "Generic (PLEG): container finished" podID="90368f73-4e1c-477c-a507-30d6108ac6a1" containerID="008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba" exitCode=0 Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.848865 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" event={"ID":"90368f73-4e1c-477c-a507-30d6108ac6a1","Type":"ContainerDied","Data":"008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba"} Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.857392 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerStarted","Data":"eebd2d3a40a01cc0194d35e263174feea94c283f0b008a6912b9170a4ea2622e"} Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.858017 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.858212 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.870116 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.870156 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.870168 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.870189 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.870202 4711 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:44Z","lastTransitionTime":"2026-01-23T08:20:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.871634 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apis
erver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.886817 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.890981 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.899641 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.900202 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.913763 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.933625 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node 
kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni
/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2026-01-23T08:20:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.946277 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.961576 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.980194 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.983151 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.983241 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.983253 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.984340 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.984374 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:44Z","lastTransitionTime":"2026-01-23T08:20:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:44 crc kubenswrapper[4711]: I0123 08:20:44.995656 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.006843 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.018470 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.030628 4711 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aa
b6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.051064 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571
644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.071218 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.085316 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.087306 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.087366 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.087383 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.087406 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.087421 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:45Z","lastTransitionTime":"2026-01-23T08:20:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.095533 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.109274 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.124077 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.137659 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.152618 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.170853 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebd2d3a40a01cc0194d35e263174feea94c283f
0b008a6912b9170a4ea2622e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.186758 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.190618 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.190641 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.190665 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.190685 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.190699 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:45Z","lastTransitionTime":"2026-01-23T08:20:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.196084 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 01:51:20.659167634 +0000 UTC Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.202877 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70
e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.216882 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.230922 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.245777 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.259286 4711 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aa
b6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.280677 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571
644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.293692 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.293753 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:45 crc 
kubenswrapper[4711]: I0123 08:20:45.293771 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.293795 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.293809 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:45Z","lastTransitionTime":"2026-01-23T08:20:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.317312 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.355714 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.396988 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.397037 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.397056 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.397082 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.397097 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:45Z","lastTransitionTime":"2026-01-23T08:20:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.445439 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.473453 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.473519 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.473463 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:20:45 crc kubenswrapper[4711]: E0123 08:20:45.473677 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:20:45 crc kubenswrapper[4711]: E0123 08:20:45.473809 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:20:45 crc kubenswrapper[4711]: E0123 08:20:45.473923 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.474306 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae
8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.493477 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.499928 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.499957 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.499967 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.499985 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.499999 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:45Z","lastTransitionTime":"2026-01-23T08:20:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.508681 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.524598 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.556930 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.597097 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.602846 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.603003 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.603095 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.603171 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.603247 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:45Z","lastTransitionTime":"2026-01-23T08:20:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.635238 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.677371 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebd2d3a40a01cc0194d35e263174feea94c283f
0b008a6912b9170a4ea2622e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.705884 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.706158 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.706225 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.706335 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.706410 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:45Z","lastTransitionTime":"2026-01-23T08:20:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.714037 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.756044 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.793946 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.829824 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.829863 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.829872 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.829891 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.829904 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:45Z","lastTransitionTime":"2026-01-23T08:20:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.832982 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd
675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\
\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.869292 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-additional-cni-plugins-955cc" event={"ID":"90368f73-4e1c-477c-a507-30d6108ac6a1","Type":"ContainerStarted","Data":"912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254"} Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.869805 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.879670 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.913429 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[
{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.934168 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.934209 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.934220 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.934238 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.934250 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:45Z","lastTransitionTime":"2026-01-23T08:20:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.955618 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:45 crc kubenswrapper[4711]: I0123 08:20:45.994118 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.035788 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.038193 4711 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.038240 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.038253 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.038275 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.038291 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:46Z","lastTransitionTime":"2026-01-23T08:20:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.078922 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.121931 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963
219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.141145 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.141189 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.141201 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.141221 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.141234 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:46Z","lastTransitionTime":"2026-01-23T08:20:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.152256 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.192114 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.196404 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 12:30:13.59048164 +0000 UTC Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.240564 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.244215 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.244261 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.244297 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.244323 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.244339 4711 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:46Z","lastTransitionTime":"2026-01-23T08:20:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.277282 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.317883 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.348054 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.348144 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.348160 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.348187 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.348202 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:46Z","lastTransitionTime":"2026-01-23T08:20:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.359277 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.403673 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebd2d3a40a01cc0194d35e263174feea94c283f
0b008a6912b9170a4ea2622e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.433751 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.451002 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.451392 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.451565 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.451720 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.451922 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:46Z","lastTransitionTime":"2026-01-23T08:20:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.474419 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.516548 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.557880 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.557945 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:46 crc 
kubenswrapper[4711]: I0123 08:20:46.557962 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.557989 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.558005 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:46Z","lastTransitionTime":"2026-01-23T08:20:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.559067 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"n
ame\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.598602 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/opens
hift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646f
b68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.640163 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.661355 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.661406 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.661415 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.661436 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.661451 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:46Z","lastTransitionTime":"2026-01-23T08:20:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.673665 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.715102 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.760333 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.765629 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.765792 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.765878 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.765962 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.766067 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:46Z","lastTransitionTime":"2026-01-23T08:20:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.795162 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.833108 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.869104 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.869194 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.869212 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.869241 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.869263 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:46Z","lastTransitionTime":"2026-01-23T08:20:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.872126 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.883119 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/service
account\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebd2d3a40a01cc0194d35e263174feea94c283f0b008a6912b9170a4ea2622e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/li
b/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.912188 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.956695 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.972289 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.972348 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.972369 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.972395 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.972413 4711 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:46Z","lastTransitionTime":"2026-01-23T08:20:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:46 crc kubenswrapper[4711]: I0123 08:20:46.995334 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:46Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.037779 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:47Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.075785 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.075912 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:47 crc 
kubenswrapper[4711]: I0123 08:20:47.075933 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.075968 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.075987 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:47Z","lastTransitionTime":"2026-01-23T08:20:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.082345 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"n
ame\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:47Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.118432 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/o
cp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:47Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.155258 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:47Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.179409 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.179455 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.179465 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.179481 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.179491 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:47Z","lastTransitionTime":"2026-01-23T08:20:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.196848 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 09:49:50.411787889 +0000 UTC Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.283266 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.283333 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.283357 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.283386 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.283403 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:47Z","lastTransitionTime":"2026-01-23T08:20:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.389737 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.389793 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.389817 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.389838 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.389851 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:47Z","lastTransitionTime":"2026-01-23T08:20:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.473734 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.473815 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.473889 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:20:47 crc kubenswrapper[4711]: E0123 08:20:47.473921 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:20:47 crc kubenswrapper[4711]: E0123 08:20:47.474177 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:20:47 crc kubenswrapper[4711]: E0123 08:20:47.474099 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.492463 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.492502 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.492546 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.492566 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.492579 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:47Z","lastTransitionTime":"2026-01-23T08:20:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.596731 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.596788 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.596801 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.596824 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.596839 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:47Z","lastTransitionTime":"2026-01-23T08:20:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.699728 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.699762 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.699772 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.699789 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.699800 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:47Z","lastTransitionTime":"2026-01-23T08:20:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.803482 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.803566 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.803579 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.803599 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.803611 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:47Z","lastTransitionTime":"2026-01-23T08:20:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.906263 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.906316 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.906329 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.906351 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:47 crc kubenswrapper[4711]: I0123 08:20:47.906365 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:47Z","lastTransitionTime":"2026-01-23T08:20:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.009715 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.009766 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.009777 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.009797 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.009810 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:48Z","lastTransitionTime":"2026-01-23T08:20:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.113428 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.113484 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.113497 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.113574 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.113589 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:48Z","lastTransitionTime":"2026-01-23T08:20:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.197951 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 01:08:15.685427495 +0000 UTC Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.216647 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.216698 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.216710 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.216733 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.216748 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:48Z","lastTransitionTime":"2026-01-23T08:20:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.319664 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.319727 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.319743 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.319764 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.319782 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:48Z","lastTransitionTime":"2026-01-23T08:20:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.423208 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.423626 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.423753 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.423877 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.424042 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:48Z","lastTransitionTime":"2026-01-23T08:20:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.526888 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.526978 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.527014 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.527047 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.527070 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:48Z","lastTransitionTime":"2026-01-23T08:20:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.630869 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.630922 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.630943 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.630962 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.630976 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:48Z","lastTransitionTime":"2026-01-23T08:20:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.734042 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.734126 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.734141 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.734169 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.734181 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:48Z","lastTransitionTime":"2026-01-23T08:20:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.837016 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.837064 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.837073 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.837091 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.837104 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:48Z","lastTransitionTime":"2026-01-23T08:20:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.882656 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jmffw_e16bfd0e-30fd-4fcf-865b-63400b88cff3/ovnkube-controller/0.log" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.886679 4711 generic.go:334] "Generic (PLEG): container finished" podID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerID="eebd2d3a40a01cc0194d35e263174feea94c283f0b008a6912b9170a4ea2622e" exitCode=1 Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.886736 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerDied","Data":"eebd2d3a40a01cc0194d35e263174feea94c283f0b008a6912b9170a4ea2622e"} Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.887577 4711 scope.go:117] "RemoveContainer" containerID="eebd2d3a40a01cc0194d35e263174feea94c283f0b008a6912b9170a4ea2622e" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.907740 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":t
rue,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:48Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.926744 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:48Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.940069 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.940125 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.940136 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.940157 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.940169 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:48Z","lastTransitionTime":"2026-01-23T08:20:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.946387 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:48Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.961431 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:48Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.981798 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eebd2d3a40a01cc0194d35e263174feea94c283f
0b008a6912b9170a4ea2622e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eebd2d3a40a01cc0194d35e263174feea94c283f0b008a6912b9170a4ea2622e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:20:48Z\\\",\\\"message\\\":\\\".893988 5953 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0123 08:20:47.894380 5953 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0123 08:20:47.894766 5953 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0123 08:20:47.894785 5953 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0123 08:20:47.894798 5953 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0123 08:20:47.894824 5953 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0123 08:20:47.894831 5953 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0123 08:20:47.894850 5953 factory.go:656] Stopping watch factory\\\\nI0123 08:20:47.894867 5953 ovnkube.go:599] Stopped ovnkube\\\\nI0123 08:20:47.894892 5953 handler.go:208] Removed *v1.Node event handler 2\\\\nI0123 08:20:47.894901 5953 handler.go:208] Removed *v1.Node event handler 7\\\\nI0123 08:20:47.894907 5953 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0123 08:20:47.894913 5953 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0123 
08\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:48Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:48 crc kubenswrapper[4711]: I0123 08:20:48.995634 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:48Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.008320 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:49Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.026736 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:49Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.048361 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:49Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.049192 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.049235 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.049244 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.049264 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.049277 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:49Z","lastTransitionTime":"2026-01-23T08:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.058943 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:49Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.070357 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:49Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.084706 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-
kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:49Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.104446 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963
219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:49Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.117647 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:49Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.132491 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:49Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.152323 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.152369 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.152378 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.152398 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:49 crc 
kubenswrapper[4711]: I0123 08:20:49.152409 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:49Z","lastTransitionTime":"2026-01-23T08:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.198949 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 01:47:04.923421849 +0000 UTC Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.255808 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.256584 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.256613 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.256651 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.256679 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:49Z","lastTransitionTime":"2026-01-23T08:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.359471 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.359553 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.359570 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.359596 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.359612 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:49Z","lastTransitionTime":"2026-01-23T08:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.462819 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.462895 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.462912 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.462944 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.462963 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:49Z","lastTransitionTime":"2026-01-23T08:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.473589 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.473657 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.473606 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:20:49 crc kubenswrapper[4711]: E0123 08:20:49.473814 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:20:49 crc kubenswrapper[4711]: E0123 08:20:49.473958 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:20:49 crc kubenswrapper[4711]: E0123 08:20:49.474021 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.567060 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.567119 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.567135 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.567161 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.567178 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:49Z","lastTransitionTime":"2026-01-23T08:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.669924 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.669980 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.669994 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.670016 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.670028 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:49Z","lastTransitionTime":"2026-01-23T08:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.776765 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.776815 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.776827 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.776848 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.776865 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:49Z","lastTransitionTime":"2026-01-23T08:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.880404 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.880462 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.880475 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.880492 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.880532 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:49Z","lastTransitionTime":"2026-01-23T08:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.896636 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jmffw_e16bfd0e-30fd-4fcf-865b-63400b88cff3/ovnkube-controller/0.log" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.899965 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerStarted","Data":"8b6ed838287cb9d3ab5637d7cbb349dd53f617732b854254c32f9149a78fdf12"} Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.900162 4711 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.916101 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:49Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.932550 4711 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:49Z is after 2025-08-24T17:21:41Z" Jan 23 
08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.969031 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:49Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.983347 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.983407 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.983427 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.983456 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.983480 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:49Z","lastTransitionTime":"2026-01-23T08:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:49 crc kubenswrapper[4711]: I0123 08:20:49.992681 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:49Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.011313 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.028843 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.048916 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.065267 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.085268 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b6ed838287cb9d3ab5637d7cbb349dd53f61773
2b854254c32f9149a78fdf12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eebd2d3a40a01cc0194d35e263174feea94c283f0b008a6912b9170a4ea2622e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:20:48Z\\\",\\\"message\\\":\\\".893988 5953 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0123 08:20:47.894380 5953 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0123 08:20:47.894766 5953 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0123 08:20:47.894785 5953 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0123 08:20:47.894798 5953 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0123 08:20:47.894824 5953 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0123 08:20:47.894831 5953 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0123 08:20:47.894850 5953 factory.go:656] Stopping watch factory\\\\nI0123 08:20:47.894867 5953 ovnkube.go:599] Stopped ovnkube\\\\nI0123 08:20:47.894892 5953 handler.go:208] Removed *v1.Node event handler 2\\\\nI0123 08:20:47.894901 5953 handler.go:208] Removed *v1.Node event handler 7\\\\nI0123 08:20:47.894907 5953 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0123 08:20:47.894913 5953 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0123 
08\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.086940 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.086984 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.086998 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.087018 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.087033 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:50Z","lastTransitionTime":"2026-01-23T08:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.101346 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.115465 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.128127 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.144077 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.163009 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.177619 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.190689 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.190743 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.190756 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.190777 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.190791 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:50Z","lastTransitionTime":"2026-01-23T08:20:50Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.199308 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 15:13:34.132733959 +0000 UTC Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.293601 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.293651 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.293667 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.293687 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.293701 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:50Z","lastTransitionTime":"2026-01-23T08:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.396888 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.396959 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.396976 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.397004 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.397022 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:50Z","lastTransitionTime":"2026-01-23T08:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.499658 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.499739 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.499763 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.499797 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.499821 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:50Z","lastTransitionTime":"2026-01-23T08:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.540063 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc"] Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.541051 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.543974 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.544227 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.561969 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.578084 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.588023 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkk77\" (UniqueName: \"kubernetes.io/projected/bc8b51d9-cfd3-4da4-a51e-0f9656820731-kube-api-access-pkk77\") pod \"ovnkube-control-plane-749d76644c-qh4pc\" (UID: \"bc8b51d9-cfd3-4da4-a51e-0f9656820731\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.588131 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bc8b51d9-cfd3-4da4-a51e-0f9656820731-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-qh4pc\" (UID: \"bc8b51d9-cfd3-4da4-a51e-0f9656820731\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.588196 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bc8b51d9-cfd3-4da4-a51e-0f9656820731-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-qh4pc\" (UID: \"bc8b51d9-cfd3-4da4-a51e-0f9656820731\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.588248 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bc8b51d9-cfd3-4da4-a51e-0f9656820731-env-overrides\") pod \"ovnkube-control-plane-749d76644c-qh4pc\" (UID: \"bc8b51d9-cfd3-4da4-a51e-0f9656820731\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.593929 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.602556 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.602596 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.602611 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.602631 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.602644 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:50Z","lastTransitionTime":"2026-01-23T08:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.613313 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.626575 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.648630 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b6ed838287cb9d3ab5637d7cbb349dd53f61773
2b854254c32f9149a78fdf12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eebd2d3a40a01cc0194d35e263174feea94c283f0b008a6912b9170a4ea2622e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:20:48Z\\\",\\\"message\\\":\\\".893988 5953 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0123 08:20:47.894380 5953 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0123 08:20:47.894766 5953 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0123 08:20:47.894785 5953 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0123 08:20:47.894798 5953 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0123 08:20:47.894824 5953 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0123 08:20:47.894831 5953 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0123 08:20:47.894850 5953 factory.go:656] Stopping watch factory\\\\nI0123 08:20:47.894867 5953 ovnkube.go:599] Stopped ovnkube\\\\nI0123 08:20:47.894892 5953 handler.go:208] Removed *v1.Node event handler 2\\\\nI0123 08:20:47.894901 5953 handler.go:208] Removed *v1.Node event handler 7\\\\nI0123 08:20:47.894907 5953 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0123 08:20:47.894913 5953 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0123 
08\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.658730 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.672218 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.683760 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.689550 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bc8b51d9-cfd3-4da4-a51e-0f9656820731-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-qh4pc\" (UID: \"bc8b51d9-cfd3-4da4-a51e-0f9656820731\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.689607 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bc8b51d9-cfd3-4da4-a51e-0f9656820731-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-qh4pc\" (UID: \"bc8b51d9-cfd3-4da4-a51e-0f9656820731\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.689623 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bc8b51d9-cfd3-4da4-a51e-0f9656820731-env-overrides\") pod \"ovnkube-control-plane-749d76644c-qh4pc\" (UID: \"bc8b51d9-cfd3-4da4-a51e-0f9656820731\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.689660 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkk77\" (UniqueName: \"kubernetes.io/projected/bc8b51d9-cfd3-4da4-a51e-0f9656820731-kube-api-access-pkk77\") pod \"ovnkube-control-plane-749d76644c-qh4pc\" (UID: \"bc8b51d9-cfd3-4da4-a51e-0f9656820731\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.690398 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" 
(UniqueName: \"kubernetes.io/configmap/bc8b51d9-cfd3-4da4-a51e-0f9656820731-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-qh4pc\" (UID: \"bc8b51d9-cfd3-4da4-a51e-0f9656820731\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.690445 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bc8b51d9-cfd3-4da4-a51e-0f9656820731-env-overrides\") pod \"ovnkube-control-plane-749d76644c-qh4pc\" (UID: \"bc8b51d9-cfd3-4da4-a51e-0f9656820731\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.695213 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bc8b51d9-cfd3-4da4-a51e-0f9656820731-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-qh4pc\" (UID: \"bc8b51d9-cfd3-4da4-a51e-0f9656820731\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.698286 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"
}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.705832 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.705864 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.705876 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.705894 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.705908 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:50Z","lastTransitionTime":"2026-01-23T08:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.709240 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkk77\" (UniqueName: \"kubernetes.io/projected/bc8b51d9-cfd3-4da4-a51e-0f9656820731-kube-api-access-pkk77\") pod \"ovnkube-control-plane-749d76644c-qh4pc\" (UID: \"bc8b51d9-cfd3-4da4-a51e-0f9656820731\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.715751 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-mult
us-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.729486 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.743935 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.755389 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.766980 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.780066 4711 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc8b51d9-cfd3-4da4-a51e-0f9656820731\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qh4pc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.808585 4711 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.808631 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.808642 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.808665 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.808678 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:50Z","lastTransitionTime":"2026-01-23T08:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.855817 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.906071 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jmffw_e16bfd0e-30fd-4fcf-865b-63400b88cff3/ovnkube-controller/1.log" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.906589 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jmffw_e16bfd0e-30fd-4fcf-865b-63400b88cff3/ovnkube-controller/0.log" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.908586 4711 generic.go:334] "Generic (PLEG): container finished" podID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerID="8b6ed838287cb9d3ab5637d7cbb349dd53f617732b854254c32f9149a78fdf12" exitCode=1 Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.908753 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerDied","Data":"8b6ed838287cb9d3ab5637d7cbb349dd53f617732b854254c32f9149a78fdf12"} Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.908836 4711 scope.go:117] "RemoveContainer" containerID="eebd2d3a40a01cc0194d35e263174feea94c283f0b008a6912b9170a4ea2622e" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.910605 4711 scope.go:117] "RemoveContainer" containerID="8b6ed838287cb9d3ab5637d7cbb349dd53f617732b854254c32f9149a78fdf12" Jan 23 08:20:50 crc kubenswrapper[4711]: E0123 08:20:50.914377 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-jmffw_openshift-ovn-kubernetes(e16bfd0e-30fd-4fcf-865b-63400b88cff3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.915518 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.915543 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:50 crc 
kubenswrapper[4711]: I0123 08:20:50.915554 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.915578 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.915590 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:50Z","lastTransitionTime":"2026-01-23T08:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.916479 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" event={"ID":"bc8b51d9-cfd3-4da4-a51e-0f9656820731","Type":"ContainerStarted","Data":"189b09ed4f9f3fed5a275f14ae20f71a5eb01732cbcf8a25e62374a8e84f72ba"} Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.927859 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.941253 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.954388 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc8b51d9-cfd3-4da4-a51e-0f9656820731\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:5
0Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qh4pc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.966731 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:50 crc kubenswrapper[4711]: I0123 08:20:50.981085 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:50Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.002425 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963
219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.016789 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.017935 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.017979 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.017989 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.018007 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.018018 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:51Z","lastTransitionTime":"2026-01-23T08:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.038412 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b6ed838287cb9d3ab5637d7cbb349dd53f61773
2b854254c32f9149a78fdf12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eebd2d3a40a01cc0194d35e263174feea94c283f0b008a6912b9170a4ea2622e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:20:48Z\\\",\\\"message\\\":\\\".893988 5953 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0123 08:20:47.894380 5953 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0123 08:20:47.894766 5953 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0123 08:20:47.894785 5953 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0123 08:20:47.894798 5953 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0123 08:20:47.894824 5953 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0123 08:20:47.894831 5953 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0123 08:20:47.894850 5953 factory.go:656] Stopping watch factory\\\\nI0123 08:20:47.894867 5953 ovnkube.go:599] Stopped ovnkube\\\\nI0123 08:20:47.894892 5953 handler.go:208] Removed *v1.Node event handler 2\\\\nI0123 08:20:47.894901 5953 handler.go:208] Removed *v1.Node event handler 7\\\\nI0123 08:20:47.894907 5953 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0123 08:20:47.894913 5953 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0123 08\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b6ed838287cb9d3ab5637d7cbb349dd53f617732b854254c32f9149a78fdf12\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:20:49Z\\\",\\\"message\\\":\\\" default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-bkn9c\\\\nI0123 08:20:49.954698 6114 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nI0123 08:20:49.954705 6114 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nI0123 08:20:49.954712 6114 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-955cc in node crc\\\\nI0123 08:20:49.954711 6114 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0123 08:20:49.954719 6114 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc after 0 failed attempt(s)\\\\nI0123 08:20:49.954726 6114 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nF0123 08:20:49.954716 6114 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable 
to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization,\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179
890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.049251 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\
\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.063869 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec
71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.080353 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.094041 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.106761 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.121085 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.121221 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.121255 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.121264 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.121287 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.121307 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:51Z","lastTransitionTime":"2026-01-23T08:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.137530 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.150631 4711 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-vpxkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.200121 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 02:26:34.528057516 +0000 UTC
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.224904 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.224964 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.224977 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.224999 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.225014 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:51Z","lastTransitionTime":"2026-01-23T08:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.328933 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.329017 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.329038 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.329075 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.329101 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:51Z","lastTransitionTime":"2026-01-23T08:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.432476 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.432541 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.432550 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.432566 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.432576 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:51Z","lastTransitionTime":"2026-01-23T08:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.474478 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 23 08:20:51 crc kubenswrapper[4711]: E0123 08:20:51.474752 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.475423 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 23 08:20:51 crc kubenswrapper[4711]: E0123 08:20:51.475574 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.475675 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 23 08:20:51 crc kubenswrapper[4711]: E0123 08:20:51.475778 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.536730 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.536817 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.536844 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.536881 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.536906 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:51Z","lastTransitionTime":"2026-01-23T08:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.640783 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.640840 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.640857 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.640880 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.640894 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:51Z","lastTransitionTime":"2026-01-23T08:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.681824 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-zv6rd"]
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.682703 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:20:51 crc kubenswrapper[4711]: E0123 08:20:51.682822 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.701182 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs\") pod \"network-metrics-daemon-zv6rd\" (UID: \"f2bbf296-ae82-4cc3-b07d-bba10895a545\") " pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.701274 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5x4v\" (UniqueName: \"kubernetes.io/projected/f2bbf296-ae82-4cc3-b07d-bba10895a545-kube-api-access-j5x4v\") pod \"network-metrics-daemon-zv6rd\" (UID: \"f2bbf296-ae82-4cc3-b07d-bba10895a545\") " pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.711595 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volume
Mounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"f
inishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.725731 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.737010 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.743397 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.743434 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.743442 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.743458 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.743470 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:51Z","lastTransitionTime":"2026-01-23T08:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.753809 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.768354 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.782941 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.794629 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.801893 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " 
Jan 23 08:20:51 crc kubenswrapper[4711]: E0123 08:20:51.802088 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:21:07.802046502 +0000 UTC m=+53.375002920 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.802154 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5x4v\" (UniqueName: \"kubernetes.io/projected/f2bbf296-ae82-4cc3-b07d-bba10895a545-kube-api-access-j5x4v\") pod \"network-metrics-daemon-zv6rd\" (UID: \"f2bbf296-ae82-4cc3-b07d-bba10895a545\") " pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.802278 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs\") pod \"network-metrics-daemon-zv6rd\" (UID: \"f2bbf296-ae82-4cc3-b07d-bba10895a545\") " pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:20:51 crc kubenswrapper[4711]: E0123 08:20:51.802456 4711 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 23 08:20:51 crc kubenswrapper[4711]: E0123 08:20:51.802536 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs podName:f2bbf296-ae82-4cc3-b07d-bba10895a545 nodeName:}" failed. 
No retries permitted until 2026-01-23 08:20:52.302495603 +0000 UTC m=+37.875451971 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs") pod "network-metrics-daemon-zv6rd" (UID: "f2bbf296-ae82-4cc3-b07d-bba10895a545") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.813301 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\
\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\
\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b6ed838287cb9d3ab5637d7cbb349dd53f617732b854254c32f9149a78fdf12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eebd2d3a40a01cc0194d35e263174feea94c283f0b008a6912b9170a4ea2622e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:20:48Z\\\",\\\"message\\\":\\\".893988 5953 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0123 08:20:47.894380 5953 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0123 08:20:47.894766 5953 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0123 08:20:47.894785 5953 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0123 08:20:47.894798 5953 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0123 08:20:47.894824 5953 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0123 08:20:47.894831 5953 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0123 08:20:47.894850 5953 factory.go:656] Stopping watch factory\\\\nI0123 08:20:47.894867 5953 ovnkube.go:599] Stopped ovnkube\\\\nI0123 08:20:47.894892 5953 handler.go:208] Removed *v1.Node event handler 2\\\\nI0123 08:20:47.894901 5953 handler.go:208] Removed *v1.Node event handler 7\\\\nI0123 08:20:47.894907 5953 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0123 08:20:47.894913 5953 handler.go:208] Removed *v1.Namespace event 
handler 1\\\\nI0123 08\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b6ed838287cb9d3ab5637d7cbb349dd53f617732b854254c32f9149a78fdf12\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:20:49Z\\\",\\\"message\\\":\\\" default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-bkn9c\\\\nI0123 08:20:49.954698 6114 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nI0123 08:20:49.954705 6114 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nI0123 08:20:49.954712 6114 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-955cc in node crc\\\\nI0123 08:20:49.954711 6114 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0123 08:20:49.954719 6114 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc after 0 failed attempt(s)\\\\nI0123 08:20:49.954726 6114 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nF0123 08:20:49.954716 6114 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller 
initialization,\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.819203 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5x4v\" (UniqueName: \"kubernetes.io/projected/f2bbf296-ae82-4cc3-b07d-bba10895a545-kube-api-access-j5x4v\") pod \"network-metrics-daemon-zv6rd\" (UID: \"f2bbf296-ae82-4cc3-b07d-bba10895a545\") " pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.825206 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.836140 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2bbf296-ae82-4cc3-b07d-bba10895a545\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zv6rd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.846001 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.846040 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.846050 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.846066 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.846076 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:51Z","lastTransitionTime":"2026-01-23T08:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.849182 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.860073 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.880741 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.894492 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.902950 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.902993 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.903013 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.903042 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:20:51 crc kubenswrapper[4711]: E0123 08:20:51.903114 4711 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 23 08:20:51 crc kubenswrapper[4711]: E0123 08:20:51.903163 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-23 08:21:07.903148828 +0000 UTC m=+53.476105196 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 23 08:20:51 crc kubenswrapper[4711]: E0123 08:20:51.903466 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 23 08:20:51 crc kubenswrapper[4711]: E0123 08:20:51.903484 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 23 08:20:51 crc kubenswrapper[4711]: E0123 08:20:51.903496 4711 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:51 crc kubenswrapper[4711]: E0123 08:20:51.903534 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-23 08:21:07.903527218 +0000 UTC m=+53.476483586 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:51 crc kubenswrapper[4711]: E0123 08:20:51.903574 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 23 08:20:51 crc kubenswrapper[4711]: E0123 08:20:51.903596 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 23 08:20:51 crc kubenswrapper[4711]: E0123 08:20:51.903604 4711 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:51 crc kubenswrapper[4711]: E0123 08:20:51.903623 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-23 08:21:07.90361772 +0000 UTC m=+53.476574088 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:20:51 crc kubenswrapper[4711]: E0123 08:20:51.903665 4711 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 23 08:20:51 crc kubenswrapper[4711]: E0123 08:20:51.903688 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-23 08:21:07.903679052 +0000 UTC m=+53.476635420 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.911875 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"co
ntainerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.921396 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jmffw_e16bfd0e-30fd-4fcf-865b-63400b88cff3/ovnkube-controller/1.log" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.925098 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" event={"ID":"bc8b51d9-cfd3-4da4-a51e-0f9656820731","Type":"ContainerStarted","Data":"acad0227e70c5133aa85caa5c169ec7e78b11dc1efaa35ed5e85904f6275b9a5"} Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.925143 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" event={"ID":"bc8b51d9-cfd3-4da4-a51e-0f9656820731","Type":"ContainerStarted","Data":"22d539e656e760657b2820618c7f91f0d8d209f3833c28493e956b337fe15419"} Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.927710 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc8b51d9-cfd3-4da4-a51e-0f9656820731\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qh4pc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.935816 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.943678 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.946049 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.946743 4711 scope.go:117] "RemoveContainer" containerID="8b6ed838287cb9d3ab5637d7cbb349dd53f617732b854254c32f9149a78fdf12" Jan 23 08:20:51 crc kubenswrapper[4711]: E0123 08:20:51.946915 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-jmffw_openshift-ovn-kubernetes(e16bfd0e-30fd-4fcf-865b-63400b88cff3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.948140 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.948169 4711 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.948178 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.948192 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.948204 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:51Z","lastTransitionTime":"2026-01-23T08:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.954283 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\
\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.964122 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc8b51d9-cfd3-4da4-a51e-0f9656820731\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d539e656e760657b2820618c7f91f0d8d209f3833c28493e956b337fe15419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acad0227e70c5133aa85caa5c169ec7e78b11dc1efaa35ed5e85904f6275b9a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\
\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qh4pc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.975353 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\"
:{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:51 crc kubenswrapper[4711]: I0123 08:20:51.993981 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963
219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:51Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.007021 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.018399 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.034080 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.047178 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.051339 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.051370 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.051379 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.051397 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.051408 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:52Z","lastTransitionTime":"2026-01-23T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.060647 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.072409 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.092624 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b6ed838287cb9d3ab5637d7cbb349dd53f61773
2b854254c32f9149a78fdf12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eebd2d3a40a01cc0194d35e263174feea94c283f0b008a6912b9170a4ea2622e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:20:48Z\\\",\\\"message\\\":\\\".893988 5953 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0123 08:20:47.894380 5953 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0123 08:20:47.894766 5953 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0123 08:20:47.894785 5953 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0123 08:20:47.894798 5953 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0123 08:20:47.894824 5953 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0123 08:20:47.894831 5953 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0123 08:20:47.894850 5953 factory.go:656] Stopping watch factory\\\\nI0123 08:20:47.894867 5953 ovnkube.go:599] Stopped ovnkube\\\\nI0123 08:20:47.894892 5953 handler.go:208] Removed *v1.Node event handler 2\\\\nI0123 08:20:47.894901 5953 handler.go:208] Removed *v1.Node event handler 7\\\\nI0123 08:20:47.894907 5953 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0123 08:20:47.894913 5953 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0123 08\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b6ed838287cb9d3ab5637d7cbb349dd53f617732b854254c32f9149a78fdf12\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:20:49Z\\\",\\\"message\\\":\\\" default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-bkn9c\\\\nI0123 08:20:49.954698 6114 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nI0123 08:20:49.954705 6114 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nI0123 08:20:49.954712 6114 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-955cc in node crc\\\\nI0123 08:20:49.954711 6114 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0123 08:20:49.954719 6114 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc after 0 failed attempt(s)\\\\nI0123 08:20:49.954726 6114 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nF0123 08:20:49.954716 6114 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable 
to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization,\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179
890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.095868 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.095911 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.095922 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.095941 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.095954 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:52Z","lastTransitionTime":"2026-01-23T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.106106 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: E0123 08:20:52.118404 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient 
memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\
\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\
":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.123091 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2bbf296-ae82-4cc3-b07d-bba10895a545\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:51Z\\\"}}\" 
for pod \"openshift-multus\"/\"network-metrics-daemon-zv6rd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.123161 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.123217 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.123233 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.123257 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.123273 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:52Z","lastTransitionTime":"2026-01-23T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.136028 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: E0123 08:20:52.136677 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.140740 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.141028 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.141117 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.141221 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.141315 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:52Z","lastTransitionTime":"2026-01-23T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.153581 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":
\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190
bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernet
es.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: E0123 08:20:52.155462 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.159948 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.159977 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.159987 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.160005 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.160017 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:52Z","lastTransitionTime":"2026-01-23T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:52 crc kubenswrapper[4711]: E0123 08:20:52.172643 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.173790 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.178381 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.178426 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.178437 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.178455 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.178466 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:52Z","lastTransitionTime":"2026-01-23T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.188474 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: E0123 08:20:52.191595 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: E0123 08:20:52.191715 4711 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.193964 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.194001 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.194011 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.194030 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.194043 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:52Z","lastTransitionTime":"2026-01-23T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.201263 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 05:48:48.343418133 +0000 UTC Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.202896 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-
v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name
\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119
b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.216218 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath
\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.227341 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\"
:\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.239590 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\
\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.250897 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc8b51d9-cfd3-4da4-a51e-0f9656820731\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d539e656e760657b2820618c7f91f0d8d209f3833c28493e956b337fe15419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acad0227e70c5133aa85caa5c169ec7e78b11dc1efaa35ed5e85904f6275b9a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qh4pc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.263162 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.284958 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963
219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.297224 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.297260 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.297270 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.297290 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.297304 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:52Z","lastTransitionTime":"2026-01-23T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.300851 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.306861 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs\") pod \"network-metrics-daemon-zv6rd\" (UID: \"f2bbf296-ae82-4cc3-b07d-bba10895a545\") " pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:20:52 crc kubenswrapper[4711]: E0123 08:20:52.307018 4711 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 23 08:20:52 crc kubenswrapper[4711]: E0123 08:20:52.307090 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs podName:f2bbf296-ae82-4cc3-b07d-bba10895a545 nodeName:}" failed. No retries permitted until 2026-01-23 08:20:53.307070553 +0000 UTC m=+38.880026921 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs") pod "network-metrics-daemon-zv6rd" (UID: "f2bbf296-ae82-4cc3-b07d-bba10895a545") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.315059 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.327080 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.339924 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.350865 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.363227 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.387610 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b6ed838287cb9d3ab5637d7cbb349dd53f61773
2b854254c32f9149a78fdf12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b6ed838287cb9d3ab5637d7cbb349dd53f617732b854254c32f9149a78fdf12\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:20:49Z\\\",\\\"message\\\":\\\" default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-bkn9c\\\\nI0123 08:20:49.954698 6114 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nI0123 08:20:49.954705 6114 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nI0123 08:20:49.954712 6114 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-955cc in node crc\\\\nI0123 08:20:49.954711 6114 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0123 08:20:49.954719 6114 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc after 0 failed attempt(s)\\\\nI0123 08:20:49.954726 6114 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nF0123 08:20:49.954716 6114 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization,\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-jmffw_openshift-ovn-kubernetes(e16bfd0e-30fd-4fcf-865b-63400b88cff3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.400319 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.400372 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.400384 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.400400 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.400410 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:52Z","lastTransitionTime":"2026-01-23T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.402042 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.413900 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2bbf296-ae82-4cc3-b07d-bba10895a545\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zv6rd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:52Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.505238 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.505313 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.505332 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.505359 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.505380 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:52Z","lastTransitionTime":"2026-01-23T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.609027 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.609078 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.609091 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.609113 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.609126 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:52Z","lastTransitionTime":"2026-01-23T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.711610 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.711648 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.711656 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.711672 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.711684 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:52Z","lastTransitionTime":"2026-01-23T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.815261 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.815627 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.815788 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.815933 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.816064 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:52Z","lastTransitionTime":"2026-01-23T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.919889 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.920036 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.920064 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.920097 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:52 crc kubenswrapper[4711]: I0123 08:20:52.920121 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:52Z","lastTransitionTime":"2026-01-23T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.023583 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.023641 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.023654 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.023676 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.023693 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:53Z","lastTransitionTime":"2026-01-23T08:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.126500 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.126883 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.126977 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.127099 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.127394 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:53Z","lastTransitionTime":"2026-01-23T08:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.202038 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 17:29:27.729960902 +0000 UTC Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.230903 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.230990 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.231118 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.231156 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.231179 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:53Z","lastTransitionTime":"2026-01-23T08:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.318426 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs\") pod \"network-metrics-daemon-zv6rd\" (UID: \"f2bbf296-ae82-4cc3-b07d-bba10895a545\") " pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:20:53 crc kubenswrapper[4711]: E0123 08:20:53.318742 4711 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 23 08:20:53 crc kubenswrapper[4711]: E0123 08:20:53.318919 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs podName:f2bbf296-ae82-4cc3-b07d-bba10895a545 nodeName:}" failed. 
No retries permitted until 2026-01-23 08:20:55.318872097 +0000 UTC m=+40.891828525 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs") pod "network-metrics-daemon-zv6rd" (UID: "f2bbf296-ae82-4cc3-b07d-bba10895a545") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.334521 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.334578 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.334590 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.334612 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.334627 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:53Z","lastTransitionTime":"2026-01-23T08:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.438032 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.438102 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.438119 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.438144 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.438162 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:53Z","lastTransitionTime":"2026-01-23T08:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.473855 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.473967 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.473892 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:20:53 crc kubenswrapper[4711]: E0123 08:20:53.474104 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.474346 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:20:53 crc kubenswrapper[4711]: E0123 08:20:53.474546 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:20:53 crc kubenswrapper[4711]: E0123 08:20:53.474716 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:20:53 crc kubenswrapper[4711]: E0123 08:20:53.474861 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.541672 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.541740 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.541757 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.541786 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.541805 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:53Z","lastTransitionTime":"2026-01-23T08:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.645291 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.645366 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.645380 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.645405 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.645421 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:53Z","lastTransitionTime":"2026-01-23T08:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.748861 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.749672 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.749740 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.749774 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.749790 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:53Z","lastTransitionTime":"2026-01-23T08:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.852553 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.852916 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.853119 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.853260 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.853388 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:53Z","lastTransitionTime":"2026-01-23T08:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.957568 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.957630 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.957644 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.957667 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:53 crc kubenswrapper[4711]: I0123 08:20:53.957681 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:53Z","lastTransitionTime":"2026-01-23T08:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.060782 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.060900 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.060925 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.060956 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.060981 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:54Z","lastTransitionTime":"2026-01-23T08:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.163952 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.164013 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.164031 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.164056 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.164075 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:54Z","lastTransitionTime":"2026-01-23T08:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.203229 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 12:51:58.125548298 +0000 UTC Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.266977 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.267060 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.267072 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.267093 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.267105 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:54Z","lastTransitionTime":"2026-01-23T08:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.370380 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.370426 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.370439 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.370456 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.370466 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:54Z","lastTransitionTime":"2026-01-23T08:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.474296 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.474348 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.474360 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.474381 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.474397 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:54Z","lastTransitionTime":"2026-01-23T08:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.576876 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.576921 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.576930 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.576948 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.576959 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:54Z","lastTransitionTime":"2026-01-23T08:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.680461 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.680533 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.680549 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.680572 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.680588 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:54Z","lastTransitionTime":"2026-01-23T08:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.783831 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.783896 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.783917 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.783947 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.783972 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:54Z","lastTransitionTime":"2026-01-23T08:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.886349 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.886402 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.886416 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.886436 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.886453 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:54Z","lastTransitionTime":"2026-01-23T08:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.989103 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.989160 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.989173 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.989192 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:54 crc kubenswrapper[4711]: I0123 08:20:54.989203 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:54Z","lastTransitionTime":"2026-01-23T08:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.092087 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.092134 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.092144 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.092162 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.092175 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:55Z","lastTransitionTime":"2026-01-23T08:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.195661 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.195720 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.195729 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.195748 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.195766 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:55Z","lastTransitionTime":"2026-01-23T08:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.203981 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 02:03:32.928542458 +0000 UTC Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.298174 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.298231 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.298243 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.298263 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.298276 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:55Z","lastTransitionTime":"2026-01-23T08:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.345467 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs\") pod \"network-metrics-daemon-zv6rd\" (UID: \"f2bbf296-ae82-4cc3-b07d-bba10895a545\") " pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:20:55 crc kubenswrapper[4711]: E0123 08:20:55.345738 4711 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 23 08:20:55 crc kubenswrapper[4711]: E0123 08:20:55.345854 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs podName:f2bbf296-ae82-4cc3-b07d-bba10895a545 nodeName:}" failed. No retries permitted until 2026-01-23 08:20:59.34582638 +0000 UTC m=+44.918782788 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs") pod "network-metrics-daemon-zv6rd" (UID: "f2bbf296-ae82-4cc3-b07d-bba10895a545") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.402947 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.403034 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.403058 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.403095 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.403116 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:55Z","lastTransitionTime":"2026-01-23T08:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.473946 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.474080 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:20:55 crc kubenswrapper[4711]: E0123 08:20:55.474418 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:20:55 crc kubenswrapper[4711]: E0123 08:20:55.474293 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.474599 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:20:55 crc kubenswrapper[4711]: E0123 08:20:55.475161 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.475216 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:20:55 crc kubenswrapper[4711]: E0123 08:20:55.475327 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.490225 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.506582 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.506628 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.506640 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.506660 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.506670 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:55Z","lastTransitionTime":"2026-01-23T08:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.512920 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.542932 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963
219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.564335 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.586021 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"r
eadOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"
containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8b6ed838287cb9d3ab5637d7cbb349dd53f617732b854254c32f9149a78fdf12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b6ed838287cb9d3ab5637d7cbb349dd53f617732b854254c32f9149a78fdf12\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:20:49Z\\\",\\\"message\\\":\\\" default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-bkn9c\\\\nI0123 08:20:49.954698 6114 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nI0123 08:20:49.954705 6114 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nI0123 08:20:49.954712 6114 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-955cc in node crc\\\\nI0123 08:20:49.954711 6114 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0123 08:20:49.954719 6114 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc after 0 failed attempt(s)\\\\nI0123 08:20:49.954726 6114 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nF0123 08:20:49.954716 6114 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller 
initialization,\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-jmffw_openshift-ovn-kubernetes(e16bfd0e-30fd-4fcf-865b-63400b88cff3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.601757 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.609355 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.609412 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.609427 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.609448 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.609460 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:55Z","lastTransitionTime":"2026-01-23T08:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.619308 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2bbf296-ae82-4cc3-b07d-bba10895a545\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zv6rd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.634867 4711 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa6
0ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.648929 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.666549 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.680817 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.694256 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.711188 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.711908 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.711940 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:55 crc 
kubenswrapper[4711]: I0123 08:20:55.711950 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.711968 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.711978 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:55Z","lastTransitionTime":"2026-01-23T08:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.724578 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:55Z is after 2025-08-24T17:21:41Z"
Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.734819 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:55Z is after 2025-08-24T17:21:41Z"
Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.747732 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:55Z is after 2025-08-24T17:21:41Z"
Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.761341 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc8b51d9-cfd3-4da4-a51e-0f9656820731\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d539e656e760657b2820618c7f91f0d8d209f3833c28493e956b337fe15419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acad0227e70c5133aa85caa5c169ec7e78b11dc1efaa35ed5e85904f6275b9a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qh4pc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:20:55Z is after 2025-08-24T17:21:41Z"
Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.815019 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.815062 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.815072 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.815089 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.815101 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:55Z","lastTransitionTime":"2026-01-23T08:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.918255 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.918554 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.918694 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.918795 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:55 crc kubenswrapper[4711]: I0123 08:20:55.918895 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:55Z","lastTransitionTime":"2026-01-23T08:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.022308 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.022345 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.022355 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.022369 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.022380 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:56Z","lastTransitionTime":"2026-01-23T08:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.124405 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.124460 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.124476 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.124496 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.124540 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:56Z","lastTransitionTime":"2026-01-23T08:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.204676 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 06:36:46.093572658 +0000 UTC
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.227619 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.227668 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.227678 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.227697 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.227708 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:56Z","lastTransitionTime":"2026-01-23T08:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.330905 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.330961 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.330971 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.330992 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.331005 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:56Z","lastTransitionTime":"2026-01-23T08:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.434644 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.434710 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.434725 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.434808 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.434830 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:56Z","lastTransitionTime":"2026-01-23T08:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.538161 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.538221 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.538234 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.538252 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.538265 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:56Z","lastTransitionTime":"2026-01-23T08:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.641348 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.641400 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.641412 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.641434 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.641448 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:56Z","lastTransitionTime":"2026-01-23T08:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.748485 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.748566 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.748580 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.748601 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.748616 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:56Z","lastTransitionTime":"2026-01-23T08:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.851625 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.851685 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.851699 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.851722 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.851739 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:56Z","lastTransitionTime":"2026-01-23T08:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.954426 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.954484 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.954501 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.954562 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:56 crc kubenswrapper[4711]: I0123 08:20:56.954579 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:56Z","lastTransitionTime":"2026-01-23T08:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.057413 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.057486 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.057544 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.057570 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.057589 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:57Z","lastTransitionTime":"2026-01-23T08:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.160405 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.160455 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.160466 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.160484 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.160496 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:57Z","lastTransitionTime":"2026-01-23T08:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.205640 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 17:58:39.810581144 +0000 UTC
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.264812 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.264879 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.264914 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.264954 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.264983 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:57Z","lastTransitionTime":"2026-01-23T08:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.368368 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.368432 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.368453 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.368480 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.368499 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:57Z","lastTransitionTime":"2026-01-23T08:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.471684 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.471738 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.471748 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.471766 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.471777 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:57Z","lastTransitionTime":"2026-01-23T08:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.472946 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 23 08:20:57 crc kubenswrapper[4711]: E0123 08:20:57.473059 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.473184 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.473252 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.473309 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 23 08:20:57 crc kubenswrapper[4711]: E0123 08:20:57.473403 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 23 08:20:57 crc kubenswrapper[4711]: E0123 08:20:57.473604 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 23 08:20:57 crc kubenswrapper[4711]: E0123 08:20:57.473730 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.574654 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.574702 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.574714 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.574732 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.574745 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:57Z","lastTransitionTime":"2026-01-23T08:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.677421 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.677481 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.677491 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.677526 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.677544 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:57Z","lastTransitionTime":"2026-01-23T08:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.781615 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.781718 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.781744 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.781789 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.781815 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:57Z","lastTransitionTime":"2026-01-23T08:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.884967 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.885025 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.885042 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.885068 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.885084 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:57Z","lastTransitionTime":"2026-01-23T08:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.987588 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.987643 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.987654 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.987670 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:57 crc kubenswrapper[4711]: I0123 08:20:57.987679 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:57Z","lastTransitionTime":"2026-01-23T08:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.090797 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.090872 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.090891 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.090920 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.090938 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:58Z","lastTransitionTime":"2026-01-23T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.193451 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.193541 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.193569 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.193602 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.193621 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:58Z","lastTransitionTime":"2026-01-23T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.206615 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 19:25:55.835522453 +0000 UTC
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.296365 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.296415 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.296425 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.296444 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.296458 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:58Z","lastTransitionTime":"2026-01-23T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.398927 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.398992 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.399014 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.399046 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.399072 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:58Z","lastTransitionTime":"2026-01-23T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.502144 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.502205 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.502223 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.502249 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.502268 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:58Z","lastTransitionTime":"2026-01-23T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.606253 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.606335 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.606354 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.606383 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.606402 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:58Z","lastTransitionTime":"2026-01-23T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.709128 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.709201 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.709217 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.709238 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.709251 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:58Z","lastTransitionTime":"2026-01-23T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.812383 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.812424 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.812434 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.812451 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.812463 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:58Z","lastTransitionTime":"2026-01-23T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.915820 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.915861 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.915870 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.915886 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:58 crc kubenswrapper[4711]: I0123 08:20:58.915896 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:58Z","lastTransitionTime":"2026-01-23T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.018907 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.019350 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.019542 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.019704 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.019853 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:59Z","lastTransitionTime":"2026-01-23T08:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.123200 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.123263 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.123273 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.123293 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.123306 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:59Z","lastTransitionTime":"2026-01-23T08:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.207661 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 19:22:21.504730097 +0000 UTC
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.226057 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.226124 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.226135 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.226157 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.226170 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:59Z","lastTransitionTime":"2026-01-23T08:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.329206 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.329259 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.329270 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.329290 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.329304 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:59Z","lastTransitionTime":"2026-01-23T08:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.392438 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs\") pod \"network-metrics-daemon-zv6rd\" (UID: \"f2bbf296-ae82-4cc3-b07d-bba10895a545\") " pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:20:59 crc kubenswrapper[4711]: E0123 08:20:59.392705 4711 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 23 08:20:59 crc kubenswrapper[4711]: E0123 08:20:59.392809 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs podName:f2bbf296-ae82-4cc3-b07d-bba10895a545 nodeName:}" failed. No retries permitted until 2026-01-23 08:21:07.392781014 +0000 UTC m=+52.965737382 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs") pod "network-metrics-daemon-zv6rd" (UID: "f2bbf296-ae82-4cc3-b07d-bba10895a545") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.432218 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.432306 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.432317 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.432333 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.432343 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:59Z","lastTransitionTime":"2026-01-23T08:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.472879 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.472938 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.472955 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.472908 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:20:59 crc kubenswrapper[4711]: E0123 08:20:59.473080 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 23 08:20:59 crc kubenswrapper[4711]: E0123 08:20:59.473185 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545"
Jan 23 08:20:59 crc kubenswrapper[4711]: E0123 08:20:59.473333 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 23 08:20:59 crc kubenswrapper[4711]: E0123 08:20:59.473454 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.535666 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.535716 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.535730 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.535750 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.535763 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:59Z","lastTransitionTime":"2026-01-23T08:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.638448 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.638497 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.638526 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.638544 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.638556 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:59Z","lastTransitionTime":"2026-01-23T08:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.742552 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.742605 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.742622 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.742640 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.742654 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:59Z","lastTransitionTime":"2026-01-23T08:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.846098 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.846136 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.846146 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.846162 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.846174 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:59Z","lastTransitionTime":"2026-01-23T08:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.949189 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.949256 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.949271 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.949290 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:20:59 crc kubenswrapper[4711]: I0123 08:20:59.949303 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:20:59Z","lastTransitionTime":"2026-01-23T08:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.052305 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.052371 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.052388 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.052418 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.052438 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:00Z","lastTransitionTime":"2026-01-23T08:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.155404 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.155481 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.155499 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.155557 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.155582 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:00Z","lastTransitionTime":"2026-01-23T08:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.208337 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 14:45:21.527389005 +0000 UTC Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.259357 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.259457 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.259481 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.259553 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.259574 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:00Z","lastTransitionTime":"2026-01-23T08:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.362400 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.362455 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.362467 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.362488 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.362519 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:00Z","lastTransitionTime":"2026-01-23T08:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.466046 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.466091 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.466102 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.466121 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.466132 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:00Z","lastTransitionTime":"2026-01-23T08:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.569761 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.569812 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.569824 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.569844 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.569855 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:00Z","lastTransitionTime":"2026-01-23T08:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.672419 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.672479 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.672493 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.672536 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.672549 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:00Z","lastTransitionTime":"2026-01-23T08:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.775877 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.775920 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.775930 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.775948 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.775965 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:00Z","lastTransitionTime":"2026-01-23T08:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.879675 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.879752 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.879770 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.879802 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.879825 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:00Z","lastTransitionTime":"2026-01-23T08:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.983346 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.983393 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.983404 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.983419 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:00 crc kubenswrapper[4711]: I0123 08:21:00.983430 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:00Z","lastTransitionTime":"2026-01-23T08:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.087121 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.087188 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.087202 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.087227 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.087241 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:01Z","lastTransitionTime":"2026-01-23T08:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.190796 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.190858 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.190874 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.190897 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.190912 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:01Z","lastTransitionTime":"2026-01-23T08:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.209058 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 02:44:45.288009269 +0000 UTC Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.294105 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.294152 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.294162 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.294181 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.294192 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:01Z","lastTransitionTime":"2026-01-23T08:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.396926 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.396996 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.397009 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.397227 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.397242 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:01Z","lastTransitionTime":"2026-01-23T08:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.473169 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.473226 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.473292 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.473312 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:01 crc kubenswrapper[4711]: E0123 08:21:01.473325 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:21:01 crc kubenswrapper[4711]: E0123 08:21:01.473436 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:21:01 crc kubenswrapper[4711]: E0123 08:21:01.473575 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:21:01 crc kubenswrapper[4711]: E0123 08:21:01.473646 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.499393 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.499433 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.499443 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.499461 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.499473 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:01Z","lastTransitionTime":"2026-01-23T08:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.602819 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.602878 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.602891 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.602908 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.602919 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:01Z","lastTransitionTime":"2026-01-23T08:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.705622 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.705712 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.705737 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.705771 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.705797 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:01Z","lastTransitionTime":"2026-01-23T08:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.809300 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.809359 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.809370 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.809392 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.809406 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:01Z","lastTransitionTime":"2026-01-23T08:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.913179 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.913265 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.913293 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.913331 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:01 crc kubenswrapper[4711]: I0123 08:21:01.913357 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:01Z","lastTransitionTime":"2026-01-23T08:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.017441 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.017556 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.017581 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.017615 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.017643 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:02Z","lastTransitionTime":"2026-01-23T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.121441 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.121569 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.121603 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.121637 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.121662 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:02Z","lastTransitionTime":"2026-01-23T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.209944 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 23:30:52.619494454 +0000 UTC Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.225097 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.225155 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.225175 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.225196 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.225209 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:02Z","lastTransitionTime":"2026-01-23T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.328728 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.328784 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.328796 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.328818 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.328834 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:02Z","lastTransitionTime":"2026-01-23T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.431888 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.431961 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.431974 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.431996 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.432010 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:02Z","lastTransitionTime":"2026-01-23T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.474279 4711 scope.go:117] "RemoveContainer" containerID="8b6ed838287cb9d3ab5637d7cbb349dd53f617732b854254c32f9149a78fdf12" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.535531 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.535890 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.535977 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.536067 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.536142 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:02Z","lastTransitionTime":"2026-01-23T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.556060 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.556447 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.556720 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.556947 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.557161 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:02Z","lastTransitionTime":"2026-01-23T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:02 crc kubenswrapper[4711]: E0123 08:21:02.579035 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:02Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.584631 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.585127 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.585148 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.585176 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.585196 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:02Z","lastTransitionTime":"2026-01-23T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:02 crc kubenswrapper[4711]: E0123 08:21:02.605777 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:02Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.616974 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.617045 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.617062 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.617091 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.617110 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:02Z","lastTransitionTime":"2026-01-23T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:02 crc kubenswrapper[4711]: E0123 08:21:02.629876 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:02Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.634348 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.634395 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.634410 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.634431 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.634444 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:02Z","lastTransitionTime":"2026-01-23T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:02 crc kubenswrapper[4711]: E0123 08:21:02.650912 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:02Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.655695 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.655969 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.656117 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.656222 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.656423 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:02Z","lastTransitionTime":"2026-01-23T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:02 crc kubenswrapper[4711]: E0123 08:21:02.672662 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:02Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:02 crc kubenswrapper[4711]: E0123 08:21:02.673093 4711 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.674888 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.675024 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.675090 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.675211 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.675320 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:02Z","lastTransitionTime":"2026-01-23T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.778559 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.778889 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.778971 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.779076 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.779174 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:02Z","lastTransitionTime":"2026-01-23T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.882048 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.882102 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.882116 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.882138 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.882155 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:02Z","lastTransitionTime":"2026-01-23T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.985000 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.985099 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.985122 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.985149 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:02 crc kubenswrapper[4711]: I0123 08:21:02.985168 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:02Z","lastTransitionTime":"2026-01-23T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.089884 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.089937 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.089947 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.089966 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.089978 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:03Z","lastTransitionTime":"2026-01-23T08:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.192471 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.192515 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.192524 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.192541 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.192551 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:03Z","lastTransitionTime":"2026-01-23T08:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.211090 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 17:53:28.412060806 +0000 UTC Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.294876 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.294921 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.294934 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.294952 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.294963 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:03Z","lastTransitionTime":"2026-01-23T08:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.397406 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.397455 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.397470 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.397489 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.397503 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:03Z","lastTransitionTime":"2026-01-23T08:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.473666 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.473813 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:03 crc kubenswrapper[4711]: E0123 08:21:03.473848 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:21:03 crc kubenswrapper[4711]: E0123 08:21:03.474035 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.474207 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:03 crc kubenswrapper[4711]: E0123 08:21:03.474280 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.474374 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:21:03 crc kubenswrapper[4711]: E0123 08:21:03.474635 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.501048 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.501115 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.501129 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.501151 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.501359 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:03Z","lastTransitionTime":"2026-01-23T08:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.604084 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.604129 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.604138 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.604155 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.604165 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:03Z","lastTransitionTime":"2026-01-23T08:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.706965 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.707032 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.707045 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.707066 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.707081 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:03Z","lastTransitionTime":"2026-01-23T08:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.810095 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.810144 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.810157 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.810182 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.810206 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:03Z","lastTransitionTime":"2026-01-23T08:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.912974 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.913023 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.913033 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.913054 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.913068 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:03Z","lastTransitionTime":"2026-01-23T08:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.970692 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jmffw_e16bfd0e-30fd-4fcf-865b-63400b88cff3/ovnkube-controller/1.log" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.973912 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerStarted","Data":"4870a9c1f150bed3eab9a6f07734e43811b7a7abf960f239717c78ab6b41277e"} Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.974466 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:21:03 crc kubenswrapper[4711]: I0123 08:21:03.988726 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:03Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.008221 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.028439 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.028539 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:04 crc 
kubenswrapper[4711]: I0123 08:21:04.028554 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.028584 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.028601 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:04Z","lastTransitionTime":"2026-01-23T08:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.034008 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"n
ame\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.047080 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.060351 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.070187 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc8b51d9-cfd3-4da4-a51e-0f9656820731\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d539e656e760657b2820618c7f91f0d8d209f3833c28493e956b337fe15419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acad0227e70c5133aa85caa5c169ec7e78b11dc1efaa35ed5e85904f6275b9a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qh4pc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.080490 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/op
enshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.102991 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963
219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.114844 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.125902 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.132694 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.132743 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.132755 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.132776 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:04 crc 
kubenswrapper[4711]: I0123 08:21:04.132792 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:04Z","lastTransitionTime":"2026-01-23T08:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.135738 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.146621 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2bbf296-ae82-4cc3-b07d-bba10895a545\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zv6rd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.158486 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.172978 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.185905 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.198467 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.211976 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 01:46:52.370072408 +0000 UTC Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.216582 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4870a9c1f150bed3eab9a6f07734e43811b7a7abf960f239717c78ab6b41277e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b6ed838287cb9d3ab5637d7cbb349dd53f617732b854254c32f9149a78fdf12\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:20:49Z\\\",\\\"message\\\":\\\" default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-bkn9c\\\\nI0123 08:20:49.954698 6114 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nI0123 08:20:49.954705 6114 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nI0123 08:20:49.954712 6114 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-955cc in node crc\\\\nI0123 08:20:49.954711 6114 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0123 08:20:49.954719 6114 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc after 0 failed attempt(s)\\\\nI0123 08:20:49.954726 6114 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nF0123 08:20:49.954716 6114 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller 
initialization,\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.235533 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.235575 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.235590 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.235610 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.235625 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:04Z","lastTransitionTime":"2026-01-23T08:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.301870 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.311695 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.318172 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc8b51d9-cfd3-4da4-a51e-0f9656820731\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d539e656e760657b2820618c7f91f0d8d209f3833c28493e956b337fe15419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acad0227e70c5133aa85caa5c169ec7e78b11dc1efaa35ed5e85904f6275b9a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\
",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qh4pc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.332039 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.337849 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.338077 4711 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.338186 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.338301 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.338411 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:04Z","lastTransitionTime":"2026-01-23T08:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.348076 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\
\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.373976 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":tru
e,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://
2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.388082 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.403770 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.419907 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\
\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.434844 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.441019 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.441315 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.441450 4711 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.441616 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.441758 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:04Z","lastTransitionTime":"2026-01-23T08:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.453253 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.471215 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4870a9c1f150bed3eab9a6f07734e43811b7a7ab
f960f239717c78ab6b41277e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b6ed838287cb9d3ab5637d7cbb349dd53f617732b854254c32f9149a78fdf12\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:20:49Z\\\",\\\"message\\\":\\\" default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-bkn9c\\\\nI0123 08:20:49.954698 6114 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nI0123 08:20:49.954705 6114 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nI0123 08:20:49.954712 6114 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-955cc in node crc\\\\nI0123 08:20:49.954711 6114 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0123 08:20:49.954719 6114 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc after 0 failed attempt(s)\\\\nI0123 08:20:49.954726 6114 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nF0123 08:20:49.954716 6114 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller 
initialization,\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.483740 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.496075 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2bbf296-ae82-4cc3-b07d-bba10895a545\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zv6rd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.512790 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.529652 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.544110 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.544211 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.544238 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.544274 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.544304 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:04Z","lastTransitionTime":"2026-01-23T08:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.546359 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.562412 4711 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-vpxkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.578969 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.647818 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.647859 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.647869 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.647886 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.647899 4711 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:04Z","lastTransitionTime":"2026-01-23T08:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.750883 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.750921 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.750931 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.750950 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.750960 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:04Z","lastTransitionTime":"2026-01-23T08:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.854304 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.854385 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.854400 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.854416 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.854427 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:04Z","lastTransitionTime":"2026-01-23T08:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.958031 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.958128 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.958141 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.958160 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.958172 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:04Z","lastTransitionTime":"2026-01-23T08:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.982632 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jmffw_e16bfd0e-30fd-4fcf-865b-63400b88cff3/ovnkube-controller/2.log" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.984803 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jmffw_e16bfd0e-30fd-4fcf-865b-63400b88cff3/ovnkube-controller/1.log" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.988586 4711 generic.go:334] "Generic (PLEG): container finished" podID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerID="4870a9c1f150bed3eab9a6f07734e43811b7a7abf960f239717c78ab6b41277e" exitCode=1 Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.988694 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerDied","Data":"4870a9c1f150bed3eab9a6f07734e43811b7a7abf960f239717c78ab6b41277e"} Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.988803 4711 scope.go:117] "RemoveContainer" containerID="8b6ed838287cb9d3ab5637d7cbb349dd53f617732b854254c32f9149a78fdf12" Jan 23 08:21:04 crc kubenswrapper[4711]: I0123 08:21:04.989384 4711 scope.go:117] "RemoveContainer" containerID="4870a9c1f150bed3eab9a6f07734e43811b7a7abf960f239717c78ab6b41277e" Jan 23 08:21:04 crc kubenswrapper[4711]: E0123 08:21:04.989573 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-jmffw_openshift-ovn-kubernetes(e16bfd0e-30fd-4fcf-865b-63400b88cff3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.011220 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963
219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.027125 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.045011 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.059491 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.061927 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.061997 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.062019 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.062052 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.062074 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:05Z","lastTransitionTime":"2026-01-23T08:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.074920 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.091690 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.105414 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.124107 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4870a9c1f150bed3eab9a6f07734e43811b7a7ab
f960f239717c78ab6b41277e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b6ed838287cb9d3ab5637d7cbb349dd53f617732b854254c32f9149a78fdf12\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:20:49Z\\\",\\\"message\\\":\\\" default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-bkn9c\\\\nI0123 08:20:49.954698 6114 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nI0123 08:20:49.954705 6114 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nI0123 08:20:49.954712 6114 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-955cc in node crc\\\\nI0123 08:20:49.954711 6114 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0123 08:20:49.954719 6114 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc after 0 failed attempt(s)\\\\nI0123 08:20:49.954726 6114 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nF0123 08:20:49.954716 6114 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization,\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4870a9c1f150bed3eab9a6f07734e43811b7a7abf960f239717c78ab6b41277e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"message\\\":\\\"3897 6305 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-2t9r8\\\\nI0123 08:21:03.863918 6305 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-2t9r8 in node crc\\\\nI0123 08:21:03.863927 6305 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-2t9r8 after 0 failed attempt(s)\\\\nI0123 08:21:03.863933 6305 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-2t9r8\\\\nI0123 08:21:03.863932 6305 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI0123 08:21:03.863948 6305 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI0123 08:21:03.863953 6305 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-bkn9c\\\\nI0123 08:21:03.863962 6305 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-bkn9c in node crc\\\\nI0123 08:21:03.863967 6305 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-bkn9c after 0 failed 
attempt(s)\\\\nI0123 08:21:03.863974 6305 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-bk\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890
219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.137436 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.148419 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2bbf296-ae82-4cc3-b07d-bba10895a545\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zv6rd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.163297 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.165008 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.165047 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.165057 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.165073 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.165086 4711 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:05Z","lastTransitionTime":"2026-01-23T08:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.174705 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.186872 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.198254 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.207608 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0caf5b03-2802-4381-a65b-2992843d72f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82a1773b9c61c3f43909d5cb8f620a2895f394d7f9edac8e064a4a54c75d0ca2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0701a83849ad2ad78eaf4f7b3be208d5ddd62c30472085fb71fc1229ba9dd51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d00094f52482a5875f5386a0f3d403753adc0df59abb0b433ca8b449779a594\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.212928 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 02:19:42.921603817 +0000 UTC Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.217388 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.226372 4711 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc8b51d9-cfd3-4da4-a51e-0f9656820731\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d539e656e760657b2820618c7f91f0d8d209f3833c28493e956b337fe15419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acad0227e70c5133aa85caa5c169ec7e78b11dc1efaa35ed5e85904f6275b9a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qh4pc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.234973 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.267735 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.267777 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.267790 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.267810 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.267825 4711 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:05Z","lastTransitionTime":"2026-01-23T08:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.370620 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.370713 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.370741 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.370778 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.370805 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:05Z","lastTransitionTime":"2026-01-23T08:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.472963 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.473012 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.473002 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.473249 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:05 crc kubenswrapper[4711]: E0123 08:21:05.473314 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:21:05 crc kubenswrapper[4711]: E0123 08:21:05.473545 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:21:05 crc kubenswrapper[4711]: E0123 08:21:05.473842 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.473873 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:05 crc kubenswrapper[4711]: E0123 08:21:05.473955 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.473979 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.474054 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.474104 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.474188 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:05Z","lastTransitionTime":"2026-01-23T08:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.497179 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.513338 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.532971 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.554571 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.573584 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4870a9c1f150bed3eab9a6f07734e43811b7a7ab
f960f239717c78ab6b41277e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b6ed838287cb9d3ab5637d7cbb349dd53f617732b854254c32f9149a78fdf12\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:20:49Z\\\",\\\"message\\\":\\\" default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-bkn9c\\\\nI0123 08:20:49.954698 6114 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nI0123 08:20:49.954705 6114 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nI0123 08:20:49.954712 6114 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-955cc in node crc\\\\nI0123 08:20:49.954711 6114 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI0123 08:20:49.954719 6114 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-955cc after 0 failed attempt(s)\\\\nI0123 08:20:49.954726 6114 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-955cc\\\\nF0123 08:20:49.954716 6114 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization,\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4870a9c1f150bed3eab9a6f07734e43811b7a7abf960f239717c78ab6b41277e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"message\\\":\\\"3897 6305 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-2t9r8\\\\nI0123 08:21:03.863918 6305 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-2t9r8 in node crc\\\\nI0123 08:21:03.863927 6305 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-2t9r8 after 0 failed attempt(s)\\\\nI0123 08:21:03.863933 6305 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-2t9r8\\\\nI0123 08:21:03.863932 6305 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI0123 08:21:03.863948 6305 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI0123 08:21:03.863953 6305 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-bkn9c\\\\nI0123 08:21:03.863962 6305 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-bkn9c in node crc\\\\nI0123 08:21:03.863967 6305 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-bkn9c after 0 failed 
attempt(s)\\\\nI0123 08:21:03.863974 6305 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-bk\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:21:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890
219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.579495 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.579557 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.579573 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.579607 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.579622 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:05Z","lastTransitionTime":"2026-01-23T08:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.588609 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.602691 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2bbf296-ae82-4cc3-b07d-bba10895a545\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zv6rd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.614943 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0caf5b03-2802-4381-a65b-2992843d72f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82a1773b9c61c3f43909d5cb8f620a2895f394d7f9edac8e064a4a54c75d0ca2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0701a83849ad2ad78eaf4f7b3be208d5ddd62c30472085fb71fc1229ba9dd51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d00094f52482a5875f5386a0f3d403753adc0df59abb0b433ca8b449779a594\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.629757 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 
08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.649012 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.670555 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.682717 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.683436 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.683473 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.683483 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.683525 4711 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.683536 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:05Z","lastTransitionTime":"2026-01-23T08:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.695636 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disab
led\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.709412 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc8b51d9-cfd3-4da4-a51e-0f9656820731\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d539e656e760657b2820618c7f91f0d8d209f3833c28493e956b337fe15419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acad0227e70c5133aa85caa5c169ec7e78b11dc1efaa35ed5e85904f6275b9a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\
\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qh4pc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.720934 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kuberne
tes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.743591 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963
219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.757308 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.772006 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:05Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.786688 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.786741 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.786756 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.786774 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:05 crc 
kubenswrapper[4711]: I0123 08:21:05.786789 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:05Z","lastTransitionTime":"2026-01-23T08:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.889281 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.889338 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.889349 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.889371 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.889382 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:05Z","lastTransitionTime":"2026-01-23T08:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.992463 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.992556 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.992575 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.992602 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.992623 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:05Z","lastTransitionTime":"2026-01-23T08:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:05 crc kubenswrapper[4711]: I0123 08:21:05.995682 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jmffw_e16bfd0e-30fd-4fcf-865b-63400b88cff3/ovnkube-controller/2.log"
Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.000173 4711 scope.go:117] "RemoveContainer" containerID="4870a9c1f150bed3eab9a6f07734e43811b7a7abf960f239717c78ab6b41277e"
Jan 23 08:21:06 crc kubenswrapper[4711]: E0123 08:21:06.000582 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-jmffw_openshift-ovn-kubernetes(e16bfd0e-30fd-4fcf-865b-63400b88cff3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3"
Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.014165 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:06Z is after
2025-08-24T17:21:41Z" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.029081 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:06Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.041974 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc8b51d9-cfd3-4da4-a51e-0f9656820731\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d539e656e760657b2820618c7f91f0d8d209f3833c28493e956b337fe15419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acad0227e70c5133aa85caa5c169ec7e78b11dc1efaa35ed5e85904f6275b9a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:50Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qh4pc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:06Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.053758 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"l
astState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:06Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.083420 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963
219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:06Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.101305 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:06Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.101867 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.101920 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.101931 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.101949 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.101962 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:06Z","lastTransitionTime":"2026-01-23T08:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.115262 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:06Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.126021 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2bbf296-ae82-4cc3-b07d-bba10895a545\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zv6rd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:06Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.140478 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:06Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.155349 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:06Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.168831 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:06Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.183941 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:06Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.206144 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.206190 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.206204 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.206222 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.206235 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:06Z","lastTransitionTime":"2026-01-23T08:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.206787 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4870a9c1f150bed3eab9a6f07734e43811b7a7abf960f239717c78ab6b41277e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4870a9c1f150bed3eab9a6f07734e43811b7a7abf960f239717c78ab6b41277e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"message\\\":\\\"3897 6305 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-2t9r8\\\\nI0123 08:21:03.863918 6305 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-2t9r8 in node crc\\\\nI0123 08:21:03.863927 6305 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-2t9r8 after 0 failed attempt(s)\\\\nI0123 08:21:03.863933 6305 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-2t9r8\\\\nI0123 08:21:03.863932 6305 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI0123 08:21:03.863948 6305 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI0123 08:21:03.863953 6305 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-bkn9c\\\\nI0123 08:21:03.863962 6305 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-bkn9c in node crc\\\\nI0123 08:21:03.863967 6305 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-bkn9c after 0 failed attempt(s)\\\\nI0123 08:21:03.863974 6305 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-bk\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:21:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-jmffw_openshift-ovn-kubernetes(e16bfd0e-30fd-4fcf-865b-63400b88cff3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:06Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.213673 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 18:37:00.215063724 +0000 UTC Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.220038 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:06Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.234575 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0caf5b03-2802-4381-a65b-2992843d72f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82a1773b9c61c3f43909d5cb8f620a2895f394d7f9edac8e064a4a54c75d0ca2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0701a83849ad2ad78eaf4f7b3be208d5ddd62c30472085fb71fc1229ba9dd51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d00094f52482a5875f5386a0f3d403753adc0df59abb0b433ca8b449779a594\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:06Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.248684 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:06Z is after 2025-08-24T17:21:41Z" Jan 23 
08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.267288 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:06Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.281498 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:06Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.309765 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.309812 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.309821 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.309848 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.309860 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:06Z","lastTransitionTime":"2026-01-23T08:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.415590 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.415635 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.415648 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.415669 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.415683 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:06Z","lastTransitionTime":"2026-01-23T08:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.519563 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.519623 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.519663 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.519684 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.519697 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:06Z","lastTransitionTime":"2026-01-23T08:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.623072 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.623140 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.623164 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.623190 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.623208 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:06Z","lastTransitionTime":"2026-01-23T08:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.726475 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.726566 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.726589 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.726612 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.726629 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:06Z","lastTransitionTime":"2026-01-23T08:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.830438 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.830569 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.830600 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.830634 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.830657 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:06Z","lastTransitionTime":"2026-01-23T08:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.934706 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.934789 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.934808 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.934835 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:06 crc kubenswrapper[4711]: I0123 08:21:06.934854 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:06Z","lastTransitionTime":"2026-01-23T08:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.037765 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.037828 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.037845 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.037870 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.037887 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:07Z","lastTransitionTime":"2026-01-23T08:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.141273 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.141344 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.141363 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.141392 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.141412 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:07Z","lastTransitionTime":"2026-01-23T08:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.214735 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 10:59:59.817458163 +0000 UTC Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.244297 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.244374 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.244389 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.244414 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.244433 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:07Z","lastTransitionTime":"2026-01-23T08:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.348412 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.348482 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.348497 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.348546 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.348561 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:07Z","lastTransitionTime":"2026-01-23T08:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.451660 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.451715 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.451729 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.451749 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.451764 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:07Z","lastTransitionTime":"2026-01-23T08:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.473296 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.473364 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.473403 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.473296 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:21:07 crc kubenswrapper[4711]: E0123 08:21:07.473530 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:21:07 crc kubenswrapper[4711]: E0123 08:21:07.473584 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:21:07 crc kubenswrapper[4711]: E0123 08:21:07.473715 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:21:07 crc kubenswrapper[4711]: E0123 08:21:07.473808 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.491947 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs\") pod \"network-metrics-daemon-zv6rd\" (UID: \"f2bbf296-ae82-4cc3-b07d-bba10895a545\") " pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:07 crc kubenswrapper[4711]: E0123 08:21:07.492239 4711 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 23 08:21:07 crc kubenswrapper[4711]: E0123 08:21:07.492354 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs podName:f2bbf296-ae82-4cc3-b07d-bba10895a545 nodeName:}" failed. No retries permitted until 2026-01-23 08:21:23.492324529 +0000 UTC m=+69.065280917 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs") pod "network-metrics-daemon-zv6rd" (UID: "f2bbf296-ae82-4cc3-b07d-bba10895a545") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.555325 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.555678 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.555721 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.555743 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.555756 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:07Z","lastTransitionTime":"2026-01-23T08:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.659220 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.659275 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.659292 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.659316 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.659333 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:07Z","lastTransitionTime":"2026-01-23T08:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.762482 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.762598 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.762624 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.762657 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.762682 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:07Z","lastTransitionTime":"2026-01-23T08:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.866092 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.866146 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.866159 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.866182 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.866197 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:07Z","lastTransitionTime":"2026-01-23T08:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.895813 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:21:07 crc kubenswrapper[4711]: E0123 08:21:07.896023 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:21:39.895986208 +0000 UTC m=+85.468942616 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.969433 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.969539 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.969558 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.969582 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.969598 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:07Z","lastTransitionTime":"2026-01-23T08:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.997106 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.997160 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.997195 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:07 crc kubenswrapper[4711]: I0123 08:21:07.997239 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:21:07 crc kubenswrapper[4711]: E0123 08:21:07.997420 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 23 08:21:07 crc kubenswrapper[4711]: E0123 08:21:07.997454 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 23 08:21:07 crc kubenswrapper[4711]: E0123 08:21:07.997587 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 23 08:21:07 crc kubenswrapper[4711]: E0123 08:21:07.997610 4711 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:21:07 crc kubenswrapper[4711]: E0123 08:21:07.997676 4711 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 23 08:21:07 crc kubenswrapper[4711]: E0123 08:21:07.997466 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 23 08:21:07 crc kubenswrapper[4711]: E0123 08:21:07.997829 4711 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:21:07 crc kubenswrapper[4711]: E0123 08:21:07.997421 4711 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 23 08:21:07 crc kubenswrapper[4711]: E0123 08:21:07.997704 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-23 08:21:39.997673598 +0000 UTC m=+85.570630006 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:21:07 crc kubenswrapper[4711]: E0123 08:21:07.997986 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-23 08:21:39.997968985 +0000 UTC m=+85.570925493 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 23 08:21:07 crc kubenswrapper[4711]: E0123 08:21:07.998009 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-23 08:21:39.997997107 +0000 UTC m=+85.570953585 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 23 08:21:07 crc kubenswrapper[4711]: E0123 08:21:07.998071 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-23 08:21:39.998021858 +0000 UTC m=+85.570978246 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.073792 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.074191 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.074375 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.074612 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.074801 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:08Z","lastTransitionTime":"2026-01-23T08:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.178640 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.178714 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.178739 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.178773 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.178798 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:08Z","lastTransitionTime":"2026-01-23T08:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.215425 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 13:27:41.69854715 +0000 UTC Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.282322 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.282372 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.282388 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.282413 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.282433 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:08Z","lastTransitionTime":"2026-01-23T08:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.385399 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.385487 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.385536 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.385567 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.385590 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:08Z","lastTransitionTime":"2026-01-23T08:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.489297 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.489343 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.489355 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.489373 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.489387 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:08Z","lastTransitionTime":"2026-01-23T08:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.592293 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.592367 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.592384 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.592413 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.592432 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:08Z","lastTransitionTime":"2026-01-23T08:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.695494 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.695556 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.695569 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.695588 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.695601 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:08Z","lastTransitionTime":"2026-01-23T08:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.798200 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.798266 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.798281 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.798306 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.798321 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:08Z","lastTransitionTime":"2026-01-23T08:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.901244 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.901318 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.901336 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.901363 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:08 crc kubenswrapper[4711]: I0123 08:21:08.901383 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:08Z","lastTransitionTime":"2026-01-23T08:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.005342 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.005423 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.005445 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.005480 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.005536 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:09Z","lastTransitionTime":"2026-01-23T08:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.109236 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.109302 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.109327 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.109357 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.109380 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:09Z","lastTransitionTime":"2026-01-23T08:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.212264 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.212346 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.212359 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.212381 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.212394 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:09Z","lastTransitionTime":"2026-01-23T08:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.216454 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 17:40:17.825441605 +0000 UTC Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.315849 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.315896 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.315915 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.315937 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.315951 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:09Z","lastTransitionTime":"2026-01-23T08:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.419480 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.419572 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.419589 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.419617 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.419637 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:09Z","lastTransitionTime":"2026-01-23T08:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.473785 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.473914 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.473822 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.473966 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:21:09 crc kubenswrapper[4711]: E0123 08:21:09.474034 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:21:09 crc kubenswrapper[4711]: E0123 08:21:09.474186 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:21:09 crc kubenswrapper[4711]: E0123 08:21:09.481102 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:21:09 crc kubenswrapper[4711]: E0123 08:21:09.481418 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.523465 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.523601 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.523623 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.523688 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.523706 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:09Z","lastTransitionTime":"2026-01-23T08:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.626820 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.626862 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.626895 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.626915 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.626930 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:09Z","lastTransitionTime":"2026-01-23T08:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.731105 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.731138 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.731147 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.731163 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.731173 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:09Z","lastTransitionTime":"2026-01-23T08:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.833306 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.833485 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.833550 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.833575 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.833594 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:09Z","lastTransitionTime":"2026-01-23T08:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.937433 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.937545 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.937580 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.937611 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:09 crc kubenswrapper[4711]: I0123 08:21:09.937632 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:09Z","lastTransitionTime":"2026-01-23T08:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.040945 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.041023 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.041047 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.041080 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.041104 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:10Z","lastTransitionTime":"2026-01-23T08:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.144533 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.144586 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.144601 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.144646 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.144656 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:10Z","lastTransitionTime":"2026-01-23T08:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.216813 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 03:00:03.546487901 +0000 UTC Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.247025 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.247064 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.247073 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.247092 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.247103 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:10Z","lastTransitionTime":"2026-01-23T08:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.350270 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.350323 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.350335 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.350357 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.350372 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:10Z","lastTransitionTime":"2026-01-23T08:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.453600 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.453704 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.453727 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.453757 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.453776 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:10Z","lastTransitionTime":"2026-01-23T08:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.557161 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.557215 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.557228 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.557247 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.557260 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:10Z","lastTransitionTime":"2026-01-23T08:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.660383 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.660443 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.660452 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.660500 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.660530 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:10Z","lastTransitionTime":"2026-01-23T08:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.762812 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.762880 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.762906 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.762937 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.762960 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:10Z","lastTransitionTime":"2026-01-23T08:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.865682 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.865764 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.865786 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.865818 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.865843 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:10Z","lastTransitionTime":"2026-01-23T08:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.968118 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.968152 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.968164 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.968181 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:10 crc kubenswrapper[4711]: I0123 08:21:10.968193 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:10Z","lastTransitionTime":"2026-01-23T08:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.070478 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.070557 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.070570 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.070593 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.070607 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:11Z","lastTransitionTime":"2026-01-23T08:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.173401 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.173458 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.173468 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.173483 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.173492 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:11Z","lastTransitionTime":"2026-01-23T08:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.217681 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 02:59:37.920326279 +0000 UTC Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.276365 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.276468 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.276484 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.276543 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.276571 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:11Z","lastTransitionTime":"2026-01-23T08:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.379896 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.379965 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.379983 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.380008 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.380026 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:11Z","lastTransitionTime":"2026-01-23T08:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.472791 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.472841 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:11 crc kubenswrapper[4711]: E0123 08:21:11.472955 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.472986 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:21:11 crc kubenswrapper[4711]: E0123 08:21:11.473050 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.472785 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:11 crc kubenswrapper[4711]: E0123 08:21:11.473130 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:21:11 crc kubenswrapper[4711]: E0123 08:21:11.473180 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.485168 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.485244 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.485266 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.485297 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.485320 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:11Z","lastTransitionTime":"2026-01-23T08:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.588911 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.588983 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.589002 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.589027 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.589045 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:11Z","lastTransitionTime":"2026-01-23T08:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.691718 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.691808 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.691844 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.691877 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.691899 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:11Z","lastTransitionTime":"2026-01-23T08:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.795807 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.795864 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.795880 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.795905 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.795921 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:11Z","lastTransitionTime":"2026-01-23T08:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.899294 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.899360 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.899378 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.899406 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:11 crc kubenswrapper[4711]: I0123 08:21:11.899425 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:11Z","lastTransitionTime":"2026-01-23T08:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.002467 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.002598 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.002617 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.002647 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.002668 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:12Z","lastTransitionTime":"2026-01-23T08:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.105745 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.105824 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.105841 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.105866 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.105884 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:12Z","lastTransitionTime":"2026-01-23T08:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.209007 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.209050 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.209058 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.209076 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.209086 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:12Z","lastTransitionTime":"2026-01-23T08:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.218157 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 14:44:11.799705199 +0000 UTC Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.311802 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.311890 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.311911 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.311941 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.311962 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:12Z","lastTransitionTime":"2026-01-23T08:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.415776 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.415830 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.415843 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.415864 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.415879 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:12Z","lastTransitionTime":"2026-01-23T08:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.518371 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.518422 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.518437 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.518456 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.518469 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:12Z","lastTransitionTime":"2026-01-23T08:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.621748 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.621817 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.621834 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.621860 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.621881 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:12Z","lastTransitionTime":"2026-01-23T08:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.724301 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.724372 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.724390 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.724415 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.724432 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:12Z","lastTransitionTime":"2026-01-23T08:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.828575 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.828676 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.828703 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.828748 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.828776 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:12Z","lastTransitionTime":"2026-01-23T08:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.931568 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.931619 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.931634 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.931652 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.931664 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:12Z","lastTransitionTime":"2026-01-23T08:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.955629 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.955690 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.955707 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.955729 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.955746 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:12Z","lastTransitionTime":"2026-01-23T08:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:12 crc kubenswrapper[4711]: E0123 08:21:12.971152 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:12Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.975878 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.975924 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.975933 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.975952 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.975963 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:12Z","lastTransitionTime":"2026-01-23T08:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:12 crc kubenswrapper[4711]: E0123 08:21:12.989694 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:12Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.994221 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.994292 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.994306 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.994328 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:12 crc kubenswrapper[4711]: I0123 08:21:12.994341 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:12Z","lastTransitionTime":"2026-01-23T08:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:13 crc kubenswrapper[4711]: E0123 08:21:13.010762 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:13Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.016019 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.016065 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.016080 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.016103 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.016119 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:13Z","lastTransitionTime":"2026-01-23T08:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:13 crc kubenswrapper[4711]: E0123 08:21:13.033774 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:13Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.037558 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.037607 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.037646 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.037673 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.037688 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:13Z","lastTransitionTime":"2026-01-23T08:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:13 crc kubenswrapper[4711]: E0123 08:21:13.049696 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:13Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:13 crc kubenswrapper[4711]: E0123 08:21:13.049814 4711 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.052025 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.052073 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.052083 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.052100 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.052110 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:13Z","lastTransitionTime":"2026-01-23T08:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.155191 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.155236 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.155248 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.155267 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.155280 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:13Z","lastTransitionTime":"2026-01-23T08:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.218945 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 11:29:44.246561616 +0000 UTC Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.257306 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.257667 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.257787 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.257967 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.258097 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:13Z","lastTransitionTime":"2026-01-23T08:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.361093 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.361133 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.361144 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.361164 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.361174 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:13Z","lastTransitionTime":"2026-01-23T08:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
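
The certificate_manager.go:356 entries in this stretch of the log print a different rotation deadline each time (2025-11-20, then 2025-12-21, then 2025-11-19) for the same kubelet-serving certificate expiring 2026-02-24. That is expected: client-go's certificate manager recomputes the deadline with random jitter inside the certificate's validity window so a fleet of kubelets does not rotate at once. A sketch of that computation; the 70-90% band is an assumption matching how the upstream manager is commonly described, and the issuance date below is invented for the example:

    package main

    import (
    	"fmt"
    	"math/rand"
    	"time"
    )

    // rotationDeadline picks a random point in the back portion of the
    // certificate's validity window. The 0.7-0.9 band is an assumption.
    func rotationDeadline(notBefore, notAfter time.Time) time.Time {
    	total := notAfter.Sub(notBefore)
    	jitter := time.Duration((0.7 + 0.2*rand.Float64()) * float64(total))
    	return notBefore.Add(jitter)
    }

    func main() {
    	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC) // from the log
    	notBefore := notAfter.Add(-365 * 24 * time.Hour)          // assumed issuance
    	for i := 0; i < 3; i++ {
    		// Each call lands on a different deadline, like the log lines.
    		fmt.Println("rotation deadline is", rotationDeadline(notBefore, notAfter))
    	}
    }
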
Has your network provider started?"} Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.464011 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.464054 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.464063 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.464079 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.464089 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:13Z","lastTransitionTime":"2026-01-23T08:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.473497 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.473600 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.473673 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.473673 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:13 crc kubenswrapper[4711]: E0123 08:21:13.473836 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:21:13 crc kubenswrapper[4711]: E0123 08:21:13.473985 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:21:13 crc kubenswrapper[4711]: E0123 08:21:13.474154 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:21:13 crc kubenswrapper[4711]: E0123 08:21:13.474263 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.566561 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.566666 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.566693 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.566721 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.566734 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:13Z","lastTransitionTime":"2026-01-23T08:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.670187 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.670245 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.670254 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.670276 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.670291 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:13Z","lastTransitionTime":"2026-01-23T08:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.773666 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.773716 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.773730 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.773749 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.773767 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:13Z","lastTransitionTime":"2026-01-23T08:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.876282 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.876343 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.876356 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.876373 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.876386 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:13Z","lastTransitionTime":"2026-01-23T08:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.980655 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.980704 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.980714 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.980743 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:13 crc kubenswrapper[4711]: I0123 08:21:13.980756 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:13Z","lastTransitionTime":"2026-01-23T08:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.083736 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.083796 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.083814 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.083835 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.083853 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:14Z","lastTransitionTime":"2026-01-23T08:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.186838 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.186924 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.186940 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.186970 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.186999 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:14Z","lastTransitionTime":"2026-01-23T08:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
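
Every NodeNotReady condition above carries the same runtime message: no CNI configuration file in /etc/kubernetes/cni/net.d/. The container runtime keeps reporting NetworkReady=false until the network provider (here, OVN-Kubernetes, whose ovnkube-controller is crash-looping later in this log) writes a CNI config into that directory. A hypothetical checker that mirrors the condition; the extension list follows libcni's usual defaults and is an assumption, not the runtime's actual code:

    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    )

    // networkReady mimics the runtime's readiness test: the network is
    // considered ready once at least one CNI config file exists in the
    // configured directory.
    func networkReady(confDir string) (bool, string) {
    	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
    		matches, _ := filepath.Glob(filepath.Join(confDir, pat))
    		if len(matches) > 0 {
    			return true, matches[0]
    		}
    	}
    	return false, fmt.Sprintf(
    		"no CNI configuration file in %s. Has your network provider started?", confDir)
    }

    func main() {
    	ready, msg := networkReady("/etc/kubernetes/cni/net.d/")
    	if !ready {
    		fmt.Println("NetworkReady=false:", msg)
    		os.Exit(1)
    	}
    	fmt.Println("NetworkReady=true, using", msg)
    }
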
Has your network provider started?"} Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.219698 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 12:56:28.120562273 +0000 UTC Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.289884 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.289945 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.289960 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.289982 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.289997 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:14Z","lastTransitionTime":"2026-01-23T08:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.393281 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.393351 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.393368 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.393389 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.393406 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:14Z","lastTransitionTime":"2026-01-23T08:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.496241 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.496297 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.496313 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.496332 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.496343 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:14Z","lastTransitionTime":"2026-01-23T08:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.599094 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.599144 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.599154 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.599173 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.599183 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:14Z","lastTransitionTime":"2026-01-23T08:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.701334 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.701373 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.701383 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.701401 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.701412 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:14Z","lastTransitionTime":"2026-01-23T08:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.804425 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.804473 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.804489 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.804536 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.804552 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:14Z","lastTransitionTime":"2026-01-23T08:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.907296 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.907350 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.907365 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.907386 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:14 crc kubenswrapper[4711]: I0123 08:21:14.907402 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:14Z","lastTransitionTime":"2026-01-23T08:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.010026 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.010092 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.010108 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.010124 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.010135 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:15Z","lastTransitionTime":"2026-01-23T08:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.112963 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.113018 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.113030 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.113052 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.113066 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:15Z","lastTransitionTime":"2026-01-23T08:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.216274 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.216677 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.216686 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.216705 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.216715 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:15Z","lastTransitionTime":"2026-01-23T08:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
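
The webhook failures in this log all bottom out in the same x509 error: the serving certificate behind https://127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, while the node's clock reads 2026-01-23, so every TLS handshake to the node/pod admission webhook fails before the request is even sent. Go's x509 machinery produces exactly this class of error when the verification time falls outside the certificate's validity window; a self-contained sketch of that window check (the certificate is generated in-process for the example, error handling elided for brevity):

    package main

    import (
    	"crypto/ecdsa"
    	"crypto/elliptic"
    	"crypto/rand"
    	"crypto/x509"
    	"crypto/x509/pkix"
    	"fmt"
    	"math/big"
    	"time"
    )

    func main() {
    	// Build a certificate that, like the webhook's, expired in the past.
    	key, _ := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
    	tmpl := &x509.Certificate{
    		SerialNumber: big.NewInt(1),
    		Subject:      pkix.Name{CommonName: "network-node-identity.openshift.io"},
    		NotBefore:    time.Date(2025, 5, 24, 17, 21, 41, 0, time.UTC), // assumed
    		NotAfter:     time.Date(2025, 8, 24, 17, 21, 41, 0, time.UTC), // from the log
    	}
    	der, _ := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &key.PublicKey, key)
    	cert, _ := x509.ParseCertificate(der)

    	// The same validity-window test the TLS handshake performs.
    	now := time.Date(2026, 1, 23, 8, 21, 13, 0, time.UTC)
    	if now.After(cert.NotAfter) || now.Before(cert.NotBefore) {
    		fmt.Printf("x509: certificate has expired or is not yet valid: current time %s is after %s\n",
    			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
    	}
    }
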
Has your network provider started?"} Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.220347 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 14:23:34.899691491 +0000 UTC Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.319950 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.320000 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.320012 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.320031 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.320043 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:15Z","lastTransitionTime":"2026-01-23T08:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.422654 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.422718 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.422732 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.422753 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.422767 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:15Z","lastTransitionTime":"2026-01-23T08:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.472796 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.472938 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.473136 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:15 crc kubenswrapper[4711]: E0123 08:21:15.473336 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:21:15 crc kubenswrapper[4711]: E0123 08:21:15.473117 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.473625 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:21:15 crc kubenswrapper[4711]: E0123 08:21:15.473690 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:21:15 crc kubenswrapper[4711]: E0123 08:21:15.473790 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.487646 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:15Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.511900 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:15Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.525422 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.525462 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.525474 4711 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.525493 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.525522 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:15Z","lastTransitionTime":"2026-01-23T08:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.547197 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:15Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.569358 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4870a9c1f150bed3eab9a6f07734e43811b7a7ab
f960f239717c78ab6b41277e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4870a9c1f150bed3eab9a6f07734e43811b7a7abf960f239717c78ab6b41277e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"message\\\":\\\"3897 6305 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-2t9r8\\\\nI0123 08:21:03.863918 6305 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-2t9r8 in node crc\\\\nI0123 08:21:03.863927 6305 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-2t9r8 after 0 failed attempt(s)\\\\nI0123 08:21:03.863933 6305 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-2t9r8\\\\nI0123 08:21:03.863932 6305 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI0123 08:21:03.863948 6305 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI0123 08:21:03.863953 6305 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-bkn9c\\\\nI0123 08:21:03.863962 6305 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-bkn9c in node crc\\\\nI0123 08:21:03.863967 6305 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-bkn9c after 0 failed attempt(s)\\\\nI0123 08:21:03.863974 6305 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-bk\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:21:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-jmffw_openshift-ovn-kubernetes(e16bfd0e-30fd-4fcf-865b-63400b88cff3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:15Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.581580 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:15Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.593080 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2bbf296-ae82-4cc3-b07d-bba10895a545\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:51Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-zv6rd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:15Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.606147 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\
\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:15Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.618569 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:15Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.628329 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.628377 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.628391 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.628410 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.628423 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:15Z","lastTransitionTime":"2026-01-23T08:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.634997 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:15Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.648481 4711 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-vpxkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:15Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.661842 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0caf5b03-2802-4381-a65b-2992843d72f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82a1773b9c61c3f43909d5cb8f620a2895f394d7f9edac8e064a4a54c75d0ca2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0701a83849ad2ad78eaf4f7b3be208d5ddd62c30472085fb71fc1229ba9dd51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d00094f52482a5875f5386a0f3d403753adc0df59abb0b433ca8b449779a594\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,
\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:15Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.672954 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:15Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.684780 4711 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc8b51d9-cfd3-4da4-a51e-0f9656820731\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d539e656e760657b2820618c7f91f0d8d209f3833c28493e956b337fe15419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acad0227e70c5133aa85caa5c169ec7e78b11dc1efaa35ed5e85904f6275b9a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qh4pc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:15Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.694654 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:15Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.713823 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963
219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:15Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.731037 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.731093 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.731104 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.731124 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.731138 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:15Z","lastTransitionTime":"2026-01-23T08:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.731173 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:15Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.745146 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:15Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.757287 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:15Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.834245 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.834311 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.834323 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.834342 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.834355 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:15Z","lastTransitionTime":"2026-01-23T08:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.937600 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.937677 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.937688 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.937706 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:15 crc kubenswrapper[4711]: I0123 08:21:15.937716 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:15Z","lastTransitionTime":"2026-01-23T08:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.040325 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.040423 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.040435 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.040453 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.040467 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:16Z","lastTransitionTime":"2026-01-23T08:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.142998 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.143046 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.143056 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.143077 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.143104 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:16Z","lastTransitionTime":"2026-01-23T08:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.221591 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 01:54:39.204650485 +0000 UTC Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.245914 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.245989 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.246002 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.246021 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.246035 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:16Z","lastTransitionTime":"2026-01-23T08:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.349001 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.349063 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.349076 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.349098 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.349113 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:16Z","lastTransitionTime":"2026-01-23T08:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.451189 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.451229 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.451238 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.451254 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.451265 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:16Z","lastTransitionTime":"2026-01-23T08:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.553567 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.553610 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.553620 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.553639 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.553650 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:16Z","lastTransitionTime":"2026-01-23T08:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.655577 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.655629 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.655647 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.655666 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.655687 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:16Z","lastTransitionTime":"2026-01-23T08:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.758400 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.758758 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.758826 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.758933 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.759033 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:16Z","lastTransitionTime":"2026-01-23T08:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.861463 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.861808 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.861896 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.861993 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.862078 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:16Z","lastTransitionTime":"2026-01-23T08:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.964574 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.964936 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.965017 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.965095 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:16 crc kubenswrapper[4711]: I0123 08:21:16.965179 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:16Z","lastTransitionTime":"2026-01-23T08:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.067788 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.067848 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.067858 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.067876 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.067887 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:17Z","lastTransitionTime":"2026-01-23T08:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.170973 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.171024 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.171036 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.171065 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.171079 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:17Z","lastTransitionTime":"2026-01-23T08:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.222143 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 05:18:39.113814377 +0000 UTC
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.273800 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.274269 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.274343 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.274439 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.274529 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:17Z","lastTransitionTime":"2026-01-23T08:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.377334 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.377378 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.377388 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.377405 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.377415 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:17Z","lastTransitionTime":"2026-01-23T08:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.473083 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.473132 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.473193 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 23 08:21:17 crc kubenswrapper[4711]: E0123 08:21:17.473369 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545"
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.473393 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 23 08:21:17 crc kubenswrapper[4711]: E0123 08:21:17.473539 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 23 08:21:17 crc kubenswrapper[4711]: E0123 08:21:17.473676 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 23 08:21:17 crc kubenswrapper[4711]: E0123 08:21:17.473819 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.474612 4711 scope.go:117] "RemoveContainer" containerID="4870a9c1f150bed3eab9a6f07734e43811b7a7abf960f239717c78ab6b41277e"
Jan 23 08:21:17 crc kubenswrapper[4711]: E0123 08:21:17.474850 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-jmffw_openshift-ovn-kubernetes(e16bfd0e-30fd-4fcf-865b-63400b88cff3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3"
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.481415 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.481478 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.481494 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.481542 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.481555 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:17Z","lastTransitionTime":"2026-01-23T08:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.588089 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.588138 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.588148 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.588168 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.588187 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:17Z","lastTransitionTime":"2026-01-23T08:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.691161 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.691234 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.691306 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.691337 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.691357 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:17Z","lastTransitionTime":"2026-01-23T08:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.794804 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.794853 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.794865 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.794885 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.794897 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:17Z","lastTransitionTime":"2026-01-23T08:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.897673 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.897725 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.897740 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.897760 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:17 crc kubenswrapper[4711]: I0123 08:21:17.897774 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:17Z","lastTransitionTime":"2026-01-23T08:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.000575 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.000617 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.000626 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.000646 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.000655 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:18Z","lastTransitionTime":"2026-01-23T08:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.102771 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.102811 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.102821 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.102837 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.102850 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:18Z","lastTransitionTime":"2026-01-23T08:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.205433 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.205471 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.205482 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.205500 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.205527 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:18Z","lastTransitionTime":"2026-01-23T08:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.222922 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 21:24:29.50063801 +0000 UTC Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.309983 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.310039 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.310051 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.310075 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.310092 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:18Z","lastTransitionTime":"2026-01-23T08:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.413306 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.413363 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.413375 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.413397 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.413410 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:18Z","lastTransitionTime":"2026-01-23T08:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.516636 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.516680 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.516689 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.516709 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.516719 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:18Z","lastTransitionTime":"2026-01-23T08:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.619237 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.619486 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.619565 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.619640 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.619702 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:18Z","lastTransitionTime":"2026-01-23T08:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.722480 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.722796 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.722924 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.723006 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.723089 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:18Z","lastTransitionTime":"2026-01-23T08:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.825377 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.825426 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.825438 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.825457 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.825474 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:18Z","lastTransitionTime":"2026-01-23T08:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.928417 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.928739 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.928818 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.928934 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:18 crc kubenswrapper[4711]: I0123 08:21:18.929028 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:18Z","lastTransitionTime":"2026-01-23T08:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.032684 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.032743 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.032755 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.032779 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.032796 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:19Z","lastTransitionTime":"2026-01-23T08:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.136361 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.136414 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.136428 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.136462 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.136476 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:19Z","lastTransitionTime":"2026-01-23T08:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.223199 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 01:42:32.629442963 +0000 UTC Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.239965 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.240020 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.240035 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.240060 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.240076 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:19Z","lastTransitionTime":"2026-01-23T08:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.344338 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.344388 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.344399 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.344419 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.344440 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:19Z","lastTransitionTime":"2026-01-23T08:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.447644 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.447696 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.447714 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.447733 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.447748 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:19Z","lastTransitionTime":"2026-01-23T08:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.473746 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.473829 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.473922 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 23 08:21:19 crc kubenswrapper[4711]: E0123 08:21:19.473939 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 23 08:21:19 crc kubenswrapper[4711]: E0123 08:21:19.474031 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.474229 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 23 08:21:19 crc kubenswrapper[4711]: E0123 08:21:19.474275 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.550440 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.550478 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.550488 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.550520 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.550531 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:19Z","lastTransitionTime":"2026-01-23T08:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.652927 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.652978 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.652989 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.653008 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.653026 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:19Z","lastTransitionTime":"2026-01-23T08:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.756018 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.756095 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.756110 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.756132 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.756143 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:19Z","lastTransitionTime":"2026-01-23T08:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.859248 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.859298 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.859311 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.859331 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.859347 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:19Z","lastTransitionTime":"2026-01-23T08:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.962040 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.962348 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.962528 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.962811 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:19 crc kubenswrapper[4711]: I0123 08:21:19.963038 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:19Z","lastTransitionTime":"2026-01-23T08:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.066064 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.066363 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.066523 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.066624 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.066714 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:20Z","lastTransitionTime":"2026-01-23T08:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.170163 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.170587 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.170728 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.170822 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.170901 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:20Z","lastTransitionTime":"2026-01-23T08:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.223332 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 04:30:04.331611915 +0000 UTC
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.273409 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.273447 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.273460 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.273480 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.273492 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:20Z","lastTransitionTime":"2026-01-23T08:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.382410 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.382467 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.382478 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.382496 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
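The certificate_manager.go:356 records report the same expiration (2026-02-24 05:53:03 UTC) with a different rotation deadline on every attempt, consistent with a deadline drawn at random from late in the certificate's validity window. A minimal sketch of that kind of jitter, assuming the roughly 70-90%-of-lifetime rule used by client-go's certificate manager; the notBefore value is an assumption, since the kubelet only logs the expiration:

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline approximates how client-go's certificate manager jitters
// rotation: a point in the last ~30% of the cert's lifetime, i.e.
// notBefore + lifetime*(0.7 + 0.3*rand). A sketch, not the upstream code.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	lifetime := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(lifetime) * (0.7 + 0.3*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	// Expiration copied from the log; the issue time is assumed (~9 months
	// earlier), which would place the jittered deadlines near the logged
	// Nov/Dec 2025 values.
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC)
	notBefore := notAfter.AddDate(0, -9, 0) // assumption
	for i := 0; i < 3; i++ {
		// Each call lands on a different deadline, matching the way the
		// logged deadline changes between attempts.
		fmt.Println(rotationDeadline(notBefore, notAfter))
	}
}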
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.382541 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:20Z","lastTransitionTime":"2026-01-23T08:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.486488 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.486539 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.486549 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.486565 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.486576 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:20Z","lastTransitionTime":"2026-01-23T08:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.588807 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.588900 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.588911 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.588928 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.588940 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:20Z","lastTransitionTime":"2026-01-23T08:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.690837 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.690890 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.690907 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.690933 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.690947 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:20Z","lastTransitionTime":"2026-01-23T08:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.794312 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.794358 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.794374 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.794395 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.794409 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:20Z","lastTransitionTime":"2026-01-23T08:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.897448 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.897500 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.897533 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.897554 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:20 crc kubenswrapper[4711]: I0123 08:21:20.897568 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:20Z","lastTransitionTime":"2026-01-23T08:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.000191 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.000232 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.000243 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.000257 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.000266 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:21Z","lastTransitionTime":"2026-01-23T08:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.102690 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.102737 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.102746 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.102762 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.102773 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:21Z","lastTransitionTime":"2026-01-23T08:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.205041 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.205378 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.205477 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.205615 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.205709 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:21Z","lastTransitionTime":"2026-01-23T08:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.224054 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 00:20:01.543313676 +0000 UTC
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.310113 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.310162 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.310173 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.310201 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.310216 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:21Z","lastTransitionTime":"2026-01-23T08:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.413364 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.413405 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.413414 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.413431 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.413444 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:21Z","lastTransitionTime":"2026-01-23T08:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.475727 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.475746 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:21:21 crc kubenswrapper[4711]: E0123 08:21:21.476115 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.475873 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 23 08:21:21 crc kubenswrapper[4711]: E0123 08:21:21.476463 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.475846 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 23 08:21:21 crc kubenswrapper[4711]: E0123 08:21:21.476742 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 23 08:21:21 crc kubenswrapper[4711]: E0123 08:21:21.476217 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.516316 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.516371 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.516382 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.516400 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.516414 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:21Z","lastTransitionTime":"2026-01-23T08:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.619247 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.619294 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.619305 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.619323 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.619337 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:21Z","lastTransitionTime":"2026-01-23T08:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.721771 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.721821 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.721830 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.721848 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.721858 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:21Z","lastTransitionTime":"2026-01-23T08:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.824361 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.824717 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.824790 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.824866 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.824947 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:21Z","lastTransitionTime":"2026-01-23T08:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.928972 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.929021 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.929033 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.929052 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:21 crc kubenswrapper[4711]: I0123 08:21:21.929067 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:21Z","lastTransitionTime":"2026-01-23T08:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.032015 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.032297 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.032394 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.032473 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.032605 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:22Z","lastTransitionTime":"2026-01-23T08:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.135521 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.135569 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.135582 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.135599 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.135611 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:22Z","lastTransitionTime":"2026-01-23T08:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.224618 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 11:30:33.596570213 +0000 UTC Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.238804 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.238841 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.238850 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.238867 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.238879 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:22Z","lastTransitionTime":"2026-01-23T08:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.341931 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.341972 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.341983 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.342002 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.342015 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:22Z","lastTransitionTime":"2026-01-23T08:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.444799 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.444835 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.444845 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.444862 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.444874 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:22Z","lastTransitionTime":"2026-01-23T08:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.547165 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.547222 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.547236 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.547258 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.547272 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:22Z","lastTransitionTime":"2026-01-23T08:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.649926 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.650117 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.650127 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.650152 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.650163 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:22Z","lastTransitionTime":"2026-01-23T08:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.752989 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.753038 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.753050 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.753069 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.753085 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:22Z","lastTransitionTime":"2026-01-23T08:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.855738 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.856133 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.856250 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.856344 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.856444 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:22Z","lastTransitionTime":"2026-01-23T08:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.959598 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.959647 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.959661 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.959682 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:22 crc kubenswrapper[4711]: I0123 08:21:22.959697 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:22Z","lastTransitionTime":"2026-01-23T08:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.061462 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.061520 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.061532 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.061549 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.061561 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:23Z","lastTransitionTime":"2026-01-23T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.165494 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.165853 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.165933 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.166019 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.166092 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:23Z","lastTransitionTime":"2026-01-23T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.225092 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 20:25:21.362098846 +0000 UTC Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.268673 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.268716 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.268729 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.268753 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.268768 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:23Z","lastTransitionTime":"2026-01-23T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.307444 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.307495 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.307525 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.307549 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.307563 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:23Z","lastTransitionTime":"2026-01-23T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:23 crc kubenswrapper[4711]: E0123 08:21:23.326118 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:23Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.332041 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.332103 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.332114 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.332151 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.332165 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:23Z","lastTransitionTime":"2026-01-23T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:23 crc kubenswrapper[4711]: E0123 08:21:23.344712 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:23Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.348663 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.348724 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
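Every one of these status-update retries fails identically: the API server cannot admit the PATCH because the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a serving certificate that expired at 2025-08-24T17:21:41Z, months before the current clock of 2026-01-23T08:21:23Z. A minimal Go sketch (not part of the log; the endpoint is taken from the error text) for reading that certificate's validity window from the node:

package main

import (
	"crypto/tls"
	"fmt"
	"log"
)

func main() {
	// Dial the webhook endpoint named in the error above and read its
	// serving certificate. Verification is deliberately skipped: the
	// goal is to inspect the validity window, not to trust the chain.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%s notBefore=%s notAfter=%s\n", cert.Subject, cert.NotBefore, cert.NotAfter)
	}
}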
event="NodeHasNoDiskPressure" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.348743 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.348764 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.348778 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:23Z","lastTransitionTime":"2026-01-23T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:23 crc kubenswrapper[4711]: E0123 08:21:23.362454 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:23Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.366848 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.366893 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.366906 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.366931 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.366942 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:23Z","lastTransitionTime":"2026-01-23T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:23 crc kubenswrapper[4711]: E0123 08:21:23.382750 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:23Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.387041 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.387078 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
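The NotReady condition itself has a separate cause recorded in the same entries: the CNI directory /etc/kubernetes/cni/net.d/ holds no network configuration, so the container runtime reports NetworkReady=false. A rough Go sketch of the implied check (the path is from the log; the real kubelet/CRI logic is more involved than a glob):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// List candidate CNI config files in the directory the kubelet is
	// complaining about; an empty result matches the NetworkReady=false state.
	var found []string
	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
		if m, err := filepath.Glob(filepath.Join("/etc/kubernetes/cni/net.d", pat)); err == nil {
			found = append(found, m...)
		}
	}
	if len(found) == 0 {
		fmt.Fprintln(os.Stderr, "no CNI configuration file found: network plugin not ready")
		os.Exit(1)
	}
	for _, f := range found {
		fmt.Println(f)
	}
}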
event="NodeHasNoDiskPressure" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.387090 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.387114 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.387128 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:23Z","lastTransitionTime":"2026-01-23T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:23 crc kubenswrapper[4711]: E0123 08:21:23.400711 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:23Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:23 crc kubenswrapper[4711]: E0123 08:21:23.401156 4711 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.402920 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.403056 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.403146 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.403226 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.403302 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:23Z","lastTransitionTime":"2026-01-23T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.472907 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.473017 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.473021 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:23 crc kubenswrapper[4711]: E0123 08:21:23.473587 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:21:23 crc kubenswrapper[4711]: E0123 08:21:23.473529 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.473040 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:21:23 crc kubenswrapper[4711]: E0123 08:21:23.473775 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:21:23 crc kubenswrapper[4711]: E0123 08:21:23.473798 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.505823 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.505860 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.505871 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.505885 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.505895 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:23Z","lastTransitionTime":"2026-01-23T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.570210 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs\") pod \"network-metrics-daemon-zv6rd\" (UID: \"f2bbf296-ae82-4cc3-b07d-bba10895a545\") " pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:23 crc kubenswrapper[4711]: E0123 08:21:23.570448 4711 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 23 08:21:23 crc kubenswrapper[4711]: E0123 08:21:23.570585 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs podName:f2bbf296-ae82-4cc3-b07d-bba10895a545 nodeName:}" failed. No retries permitted until 2026-01-23 08:21:55.570559008 +0000 UTC m=+101.143515376 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs") pod "network-metrics-daemon-zv6rd" (UID: "f2bbf296-ae82-4cc3-b07d-bba10895a545") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.608376 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.608446 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.608462 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.608485 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.608502 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:23Z","lastTransitionTime":"2026-01-23T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.710723 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.710778 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.710792 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.710811 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.710827 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:23Z","lastTransitionTime":"2026-01-23T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.813177 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.813223 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.813235 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.813253 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.813265 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:23Z","lastTransitionTime":"2026-01-23T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.915959 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.915996 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.916005 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.916020 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:23 crc kubenswrapper[4711]: I0123 08:21:23.916030 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:23Z","lastTransitionTime":"2026-01-23T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.018420 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.018464 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.018478 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.018496 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.018526 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:24Z","lastTransitionTime":"2026-01-23T08:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.121049 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.121088 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.121102 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.121120 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.121133 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:24Z","lastTransitionTime":"2026-01-23T08:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.223992 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.224053 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.224066 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.224086 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.224099 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:24Z","lastTransitionTime":"2026-01-23T08:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.227173 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 19:29:28.002359332 +0000 UTC Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.326729 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.326782 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.326798 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.326816 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.326826 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:24Z","lastTransitionTime":"2026-01-23T08:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.433669 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.433724 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.433736 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.433759 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.433777 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:24Z","lastTransitionTime":"2026-01-23T08:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.537051 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.537113 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.537123 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.537142 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.537153 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:24Z","lastTransitionTime":"2026-01-23T08:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.640631 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.640682 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.640692 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.640710 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.640722 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:24Z","lastTransitionTime":"2026-01-23T08:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.743523 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.743569 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.743579 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.743595 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.743608 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:24Z","lastTransitionTime":"2026-01-23T08:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.846557 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.846618 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.846654 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.846675 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.846688 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:24Z","lastTransitionTime":"2026-01-23T08:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.949307 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.949367 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.949380 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.949400 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:24 crc kubenswrapper[4711]: I0123 08:21:24.949412 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:24Z","lastTransitionTime":"2026-01-23T08:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.051912 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.051953 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.051965 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.051981 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.051990 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:25Z","lastTransitionTime":"2026-01-23T08:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.155374 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.155449 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.155462 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.155480 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.155522 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:25Z","lastTransitionTime":"2026-01-23T08:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.228322 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 21:29:54.530093475 +0000 UTC Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.258098 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.258651 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.258770 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.258868 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.259244 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:25Z","lastTransitionTime":"2026-01-23T08:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.362284 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.362314 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.362377 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.362393 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.362401 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:25Z","lastTransitionTime":"2026-01-23T08:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.465215 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.465254 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.465327 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.465344 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.465354 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:25Z","lastTransitionTime":"2026-01-23T08:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.473835 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:25 crc kubenswrapper[4711]: E0123 08:21:25.473954 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.473948 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:25 crc kubenswrapper[4711]: E0123 08:21:25.474037 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.474421 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:21:25 crc kubenswrapper[4711]: E0123 08:21:25.474490 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.474611 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:25 crc kubenswrapper[4711]: E0123 08:21:25.474714 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.494472 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:25Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.506799 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2bbf296-ae82-4cc3-b07d-bba10895a545\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zv6rd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:25Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.520488 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:25Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.533336 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:25Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.551607 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:25Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.566093 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:25Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.568625 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.568679 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.568698 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.568727 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.568753 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:25Z","lastTransitionTime":"2026-01-23T08:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.585009 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4870a9c1f150bed3eab9a6f07734e43811b7a7abf960f239717c78ab6b41277e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4870a9c1f150bed3eab9a6f07734e43811b7a7abf960f239717c78ab6b41277e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"message\\\":\\\"3897 6305 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-2t9r8\\\\nI0123 08:21:03.863918 6305 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-2t9r8 in node crc\\\\nI0123 08:21:03.863927 6305 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-2t9r8 after 0 failed attempt(s)\\\\nI0123 08:21:03.863933 6305 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-2t9r8\\\\nI0123 08:21:03.863932 6305 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI0123 08:21:03.863948 6305 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI0123 08:21:03.863953 6305 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-bkn9c\\\\nI0123 08:21:03.863962 6305 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-bkn9c in node crc\\\\nI0123 08:21:03.863967 6305 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-bkn9c after 0 failed attempt(s)\\\\nI0123 08:21:03.863974 6305 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-bk\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:21:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-jmffw_openshift-ovn-kubernetes(e16bfd0e-30fd-4fcf-865b-63400b88cff3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:25Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.598499 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0caf5b03-2802-4381-a65b-2992843d72f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82a1773b9c61c3f43909d5cb8f620a2895f394d7f9edac8e064a4a54c75d0ca2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0701a83849ad2ad78eaf4f7b3be208d5ddd62c30472085fb71fc1229ba9dd51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d00094f52482a5875f5386a0f3d403753adc0df59abb0b433ca8b449779a594\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:25Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.611723 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:25Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.629057 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:25Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.645254 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:25Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.657048 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:25Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.670475 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:25Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.672030 4711 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.672068 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.672082 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.672106 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.672122 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:25Z","lastTransitionTime":"2026-01-23T08:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.683493 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc8b51d9-cfd3-4da4-a51e-0f9656820731\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d539e656e760657b2820618c7f91f0d8d209f3833c28493e956b337fe15419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acad0227e70c5133aa85caa5c169ec7e78b11dc1efaa35ed5e85904f6275b9a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qh4pc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:25Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.697798 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee12
20d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:25Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.719614 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963
219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:25Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.733307 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:25Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.748858 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:25Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.776812 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.776863 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.776872 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.776889 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:25 crc 
kubenswrapper[4711]: I0123 08:21:25.776902 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:25Z","lastTransitionTime":"2026-01-23T08:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.879844 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.879891 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.879901 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.879941 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.879953 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:25Z","lastTransitionTime":"2026-01-23T08:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.983225 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.983290 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.983304 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.983325 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:25 crc kubenswrapper[4711]: I0123 08:21:25.983338 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:25Z","lastTransitionTime":"2026-01-23T08:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.087005 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.087056 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.087068 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.087089 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.087103 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:26Z","lastTransitionTime":"2026-01-23T08:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.190602 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.190648 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.190659 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.190678 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.190691 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:26Z","lastTransitionTime":"2026-01-23T08:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.229419 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 00:45:01.523422362 +0000 UTC Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.293461 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.293540 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.293551 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.293572 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.293583 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:26Z","lastTransitionTime":"2026-01-23T08:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.397161 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.397222 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.397236 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.397256 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.397267 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:26Z","lastTransitionTime":"2026-01-23T08:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.501000 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.501487 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.501547 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.501571 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.501584 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:26Z","lastTransitionTime":"2026-01-23T08:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.604412 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.604466 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.604479 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.604498 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.604532 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:26Z","lastTransitionTime":"2026-01-23T08:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.708455 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.708536 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.708549 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.708570 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.708582 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:26Z","lastTransitionTime":"2026-01-23T08:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.810915 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.810977 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.810997 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.811017 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.811027 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:26Z","lastTransitionTime":"2026-01-23T08:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.914254 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.914310 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.914319 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.914339 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:26 crc kubenswrapper[4711]: I0123 08:21:26.914351 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:26Z","lastTransitionTime":"2026-01-23T08:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.016978 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.017028 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.017036 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.017055 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.017065 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:27Z","lastTransitionTime":"2026-01-23T08:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.119266 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.119330 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.119344 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.119364 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.119377 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:27Z","lastTransitionTime":"2026-01-23T08:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.221684 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.221727 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.221737 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.221752 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.221763 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:27Z","lastTransitionTime":"2026-01-23T08:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.230025 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 16:41:46.975382673 +0000 UTC Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.324209 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.324281 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.324326 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.324349 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.324364 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:27Z","lastTransitionTime":"2026-01-23T08:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.427196 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.427252 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.427263 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.427280 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.427292 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:27Z","lastTransitionTime":"2026-01-23T08:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.472792 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.472836 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.472872 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.472800 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:27 crc kubenswrapper[4711]: E0123 08:21:27.472988 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:21:27 crc kubenswrapper[4711]: E0123 08:21:27.473088 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:21:27 crc kubenswrapper[4711]: E0123 08:21:27.473202 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:21:27 crc kubenswrapper[4711]: E0123 08:21:27.473233 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.530331 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.530385 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.530396 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.530415 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.530430 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:27Z","lastTransitionTime":"2026-01-23T08:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.633692 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.633739 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.633752 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.633775 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.633790 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:27Z","lastTransitionTime":"2026-01-23T08:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.735908 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.736050 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.736060 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.736075 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.736085 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:27Z","lastTransitionTime":"2026-01-23T08:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.839061 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.839100 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.839112 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.839132 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.839144 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:27Z","lastTransitionTime":"2026-01-23T08:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.942363 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.942404 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.942417 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.942438 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:27 crc kubenswrapper[4711]: I0123 08:21:27.942454 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:27Z","lastTransitionTime":"2026-01-23T08:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.046446 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.046538 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.046555 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.046580 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.046635 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:28Z","lastTransitionTime":"2026-01-23T08:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.079458 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vpxkq_8cc803a0-2626-4444-b4b2-8e9567277d44/kube-multus/0.log" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.079548 4711 generic.go:334] "Generic (PLEG): container finished" podID="8cc803a0-2626-4444-b4b2-8e9567277d44" containerID="e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6" exitCode=1 Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.079587 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vpxkq" event={"ID":"8cc803a0-2626-4444-b4b2-8e9567277d44","Type":"ContainerDied","Data":"e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6"} Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.080131 4711 scope.go:117] "RemoveContainer" containerID="e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.096905 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0caf5b03-2802-4381-a65b-2992843d72f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82a1773b9c61c3f43909d5cb8f620a2895f394d7f9edac8e064a4a54c75d0ca2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0701a83849ad2ad78eaf4f7b3be208d5ddd62c30472085fb71fc1229ba9dd51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-
pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d00094f52482a5875f5386a0f3d403753adc0df59abb0b433ca8b449779a594\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:28Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.113317 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:28Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.131288 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:28Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.149780 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:28Z\\\",\\\"message\\\":\\\"2026-01-23T08:20:41+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_87fc06b8-1c5e-421c-9143-8a6997e4aa37\\\\n2026-01-23T08:20:41+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_87fc06b8-1c5e-421c-9143-8a6997e4aa37 to /host/opt/cni/bin/\\\\n2026-01-23T08:20:41Z [verbose] multus-daemon started\\\\n2026-01-23T08:20:41Z [verbose] Readiness Indicator file check\\\\n2026-01-23T08:21:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:28Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.157587 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.157629 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.157639 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.157658 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.157670 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:28Z","lastTransitionTime":"2026-01-23T08:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.160960 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:28Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.172018 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:28Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.183359 4711 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc8b51d9-cfd3-4da4-a51e-0f9656820731\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d539e656e760657b2820618c7f91f0d8d209f3833c28493e956b337fe15419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acad0227e70c5133aa85caa5c169ec7e78b11dc1efaa35ed5e85904f6275b9a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qh4pc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:28Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.197733 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:28Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.209621 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:28Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.228234 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:28Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.230347 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 08:44:41.247144565 +0000 UTC Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.245247 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:28Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.259592 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.259633 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.259643 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.259658 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.259669 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:28Z","lastTransitionTime":"2026-01-23T08:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.268614 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4870a9c1f150bed3eab9a6f07734e43811b7a7abf960f239717c78ab6b41277e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4870a9c1f150bed3eab9a6f07734e43811b7a7abf960f239717c78ab6b41277e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"message\\\":\\\"3897 6305 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-2t9r8\\\\nI0123 08:21:03.863918 6305 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-2t9r8 in node crc\\\\nI0123 08:21:03.863927 6305 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-2t9r8 after 0 failed attempt(s)\\\\nI0123 08:21:03.863933 6305 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-2t9r8\\\\nI0123 08:21:03.863932 6305 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI0123 08:21:03.863948 6305 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI0123 08:21:03.863953 6305 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-bkn9c\\\\nI0123 08:21:03.863962 6305 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-bkn9c in node crc\\\\nI0123 08:21:03.863967 6305 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-bkn9c after 0 failed attempt(s)\\\\nI0123 08:21:03.863974 6305 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-bk\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:21:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-jmffw_openshift-ovn-kubernetes(e16bfd0e-30fd-4fcf-865b-63400b88cff3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:28Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.284063 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:28Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.295725 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2bbf296-ae82-4cc3-b07d-bba10895a545\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:51Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-zv6rd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:28Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.310713 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\
\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:28Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.328890 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:28Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.344136 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:28Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.357936 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:28Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.361850 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.361887 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.361904 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.361922 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.361934 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:28Z","lastTransitionTime":"2026-01-23T08:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.465306 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.465448 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.465468 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.465491 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.465525 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:28Z","lastTransitionTime":"2026-01-23T08:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.473568 4711 scope.go:117] "RemoveContainer" containerID="4870a9c1f150bed3eab9a6f07734e43811b7a7abf960f239717c78ab6b41277e" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.569788 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.569852 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.569866 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.569892 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.569907 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:28Z","lastTransitionTime":"2026-01-23T08:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.673667 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.673703 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.673714 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.673735 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.673749 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:28Z","lastTransitionTime":"2026-01-23T08:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.777150 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.777197 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.777210 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.777229 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.777283 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:28Z","lastTransitionTime":"2026-01-23T08:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.880617 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.880665 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.880679 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.880703 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.880718 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:28Z","lastTransitionTime":"2026-01-23T08:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.983110 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.983166 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.983180 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.983209 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:28 crc kubenswrapper[4711]: I0123 08:21:28.983227 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:28Z","lastTransitionTime":"2026-01-23T08:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.084636 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jmffw_e16bfd0e-30fd-4fcf-865b-63400b88cff3/ovnkube-controller/2.log" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.087164 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerStarted","Data":"677bbe8d1690ae3e49caee53eab8ca7d0c7cf53813f44d3c4d96260363dbb073"} Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.088392 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.090094 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vpxkq_8cc803a0-2626-4444-b4b2-8e9567277d44/kube-multus/0.log" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.090140 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vpxkq" event={"ID":"8cc803a0-2626-4444-b4b2-8e9567277d44","Type":"ContainerStarted","Data":"eafae4831c9cebb04dc1fe0259fa32717a44734f39508d8cd162ae212d1429fa"} Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.114527 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.114592 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.114605 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.114624 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.114637 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:29Z","lastTransitionTime":"2026-01-23T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin 
returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.120545 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0caf5b03-2802-4381-a65b-2992843d72f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82a1773b9c61c3f43909d5cb8f620a2895f394d7f9edac8e064a4a54c75d0ca2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0701a83849ad2ad78eaf4f7b3be208d5ddd62c30472085fb71fc1229ba9dd51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d00094f52482a5875f5386a0f3d403753adc0df59abb0b433ca8b449779a594\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.136024 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.157460 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\
\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83
170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.180053 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:28Z\\\",\\\"message\\\":\\\"2026-01-23T08:20:41+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_87fc06b8-1c5e-421c-9143-8a6997e4aa37\\\\n2026-01-23T08:20:41+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_87fc06b8-1c5e-421c-9143-8a6997e4aa37 to /host/opt/cni/bin/\\\\n2026-01-23T08:20:41Z [verbose] multus-daemon started\\\\n2026-01-23T08:20:41Z [verbose] Readiness Indicator file check\\\\n2026-01-23T08:21:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.197830 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.213193 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.221501 4711 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.221577 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.221589 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.221624 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.221639 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:29Z","lastTransitionTime":"2026-01-23T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.230559 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 04:55:08.085895466 +0000 UTC Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.231635 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc8b51d9-cfd3-4da4-a51e-0f9656820731\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d539e656e760657b2820618c7f91f0d8d209f3833c28493e956b337fe15419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acad0227e70c5133aa85caa5c169ec7e78b1
1dc1efaa35ed5e85904f6275b9a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qh4pc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.248283 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf8
6bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.280185 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963
219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.297223 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.312469 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.325237 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.325268 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.325277 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.325292 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:29 crc 
kubenswrapper[4711]: I0123 08:21:29.325303 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:29Z","lastTransitionTime":"2026-01-23T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.330813 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2bbf296-ae82-4cc3-b07d-bba10895a545\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zv6rd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.346109 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.362409 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.381103 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.399273 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.420799 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677bbe8d1690ae3e49caee53eab8ca7d0c7cf538
13f44d3c4d96260363dbb073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4870a9c1f150bed3eab9a6f07734e43811b7a7abf960f239717c78ab6b41277e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"message\\\":\\\"3897 6305 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-2t9r8\\\\nI0123 08:21:03.863918 6305 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-2t9r8 in node crc\\\\nI0123 08:21:03.863927 6305 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-2t9r8 after 0 failed attempt(s)\\\\nI0123 08:21:03.863933 6305 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-2t9r8\\\\nI0123 08:21:03.863932 6305 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI0123 08:21:03.863948 6305 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI0123 08:21:03.863953 6305 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-bkn9c\\\\nI0123 08:21:03.863962 6305 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-bkn9c in node crc\\\\nI0123 08:21:03.863967 6305 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-bkn9c after 0 failed attempt(s)\\\\nI0123 08:21:03.863974 6305 default_network_controller.go:776] Recording success event on pod 
openshift-image-registry/node-ca-bk\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:21:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:21:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.428003 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.428052 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.428081 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.428099 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.428110 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:29Z","lastTransitionTime":"2026-01-23T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.446145 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.472878 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.472948 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.473058 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 23 08:21:29 crc kubenswrapper[4711]: E0123 08:21:29.473054 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 23 08:21:29 crc kubenswrapper[4711]: E0123 08:21:29.473180 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 23 08:21:29 crc kubenswrapper[4711]: E0123 08:21:29.473283 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.473486 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 23 08:21:29 crc kubenswrapper[4711]: E0123 08:21:29.473580 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.483442 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.497274 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.506292 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.523207 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.530458 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.530499 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.530521 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.530539 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.530551 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:29Z","lastTransitionTime":"2026-01-23T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.547921 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677bbe8d1690ae3e49caee53eab8ca7d0c7cf53813f44d3c4d96260363dbb073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4870a9c1f150bed3eab9a6f07734e43811b7a7abf960f239717c78ab6b41277e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"message\\\":\\\"3897 6305 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-2t9r8\\\\nI0123 08:21:03.863918 6305 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-2t9r8 in node crc\\\\nI0123 08:21:03.863927 6305 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-2t9r8 after 0 failed attempt(s)\\\\nI0123 08:21:03.863933 6305 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-2t9r8\\\\nI0123 08:21:03.863932 6305 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI0123 08:21:03.863948 6305 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI0123 08:21:03.863953 6305 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-bkn9c\\\\nI0123 08:21:03.863962 6305 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-bkn9c in node crc\\\\nI0123 08:21:03.863967 6305 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-bkn9c after 0 failed attempt(s)\\\\nI0123 08:21:03.863974 6305 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-bk\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:21:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:21:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.564053 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.576367 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2bbf296-ae82-4cc3-b07d-bba10895a545\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zv6rd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.597000 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.616720 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.634013 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.634064 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.634077 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.634100 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.634113 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:29Z","lastTransitionTime":"2026-01-23T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.638160 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z"
Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.659843 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eafae4831c9cebb04dc1fe0259fa32717a44734f39508d8cd162ae212d1429fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:28Z\\\",\\\"message\\\":\\\"2026-01-23T08:20:41+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_87fc06b8-1c5e-421c-9143-8a6997e4aa37\\\\n2026-01-23T08:20:41+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_87fc06b8-1c5e-421c-9143-8a6997e4aa37 to /host/opt/cni/bin/\\\\n2026-01-23T08:20:41Z [verbose] multus-daemon started\\\\n2026-01-23T08:20:41Z [verbose] Readiness Indicator file check\\\\n2026-01-23T08:21:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:21:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.687609 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0caf5b03-2802-4381-a65b-2992843d72f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82a1773b9c61c3f43909d5cb8f620a2895f394d7f9edac8e064a4a54c75d0ca2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0701a83849ad2ad78eaf4f7b3be208d5ddd62c30472085fb71fc1229ba9dd51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d00094f52482a5875f5386a0f3d403753adc0df59abb0b433ca8b449779a594\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.699033 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.714140 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc8b51d9-cfd3-4da4-a51e-0f9656820731\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d539e656e760657b2820618c7f91f0d8d209f3833c28493e956b337fe15419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acad022
7e70c5133aa85caa5c169ec7e78b11dc1efaa35ed5e85904f6275b9a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qh4pc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.727865 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,
\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.736536 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.736572 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.736581 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.736598 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.736608 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:29Z","lastTransitionTime":"2026-01-23T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.750901 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.768331 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.783707 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.798489 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\
\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:29Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.839003 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.839059 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.839081 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.839104 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.839119 4711 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:29Z","lastTransitionTime":"2026-01-23T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.941738 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.941789 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.941802 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.941821 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:29 crc kubenswrapper[4711]: I0123 08:21:29.941833 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:29Z","lastTransitionTime":"2026-01-23T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.045876 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.045942 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.045961 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.045989 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.046009 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:30Z","lastTransitionTime":"2026-01-23T08:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.148919 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.149001 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.149025 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.149060 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.149084 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:30Z","lastTransitionTime":"2026-01-23T08:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.231002 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 22:14:17.222954535 +0000 UTC Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.252243 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.252317 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.252343 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.252463 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.252578 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:30Z","lastTransitionTime":"2026-01-23T08:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.355670 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.355716 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.355728 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.355746 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.355759 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:30Z","lastTransitionTime":"2026-01-23T08:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.460931 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.460997 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.461010 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.461035 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.461048 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:30Z","lastTransitionTime":"2026-01-23T08:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.564730 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.564819 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.564832 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.564857 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.564871 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:30Z","lastTransitionTime":"2026-01-23T08:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.667840 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.667892 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.667901 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.667919 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.667933 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:30Z","lastTransitionTime":"2026-01-23T08:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.771202 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.771273 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.771294 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.771334 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.771350 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:30Z","lastTransitionTime":"2026-01-23T08:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.874961 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.875027 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.875042 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.875066 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.875080 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:30Z","lastTransitionTime":"2026-01-23T08:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.978628 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.978715 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.978727 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.978747 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:30 crc kubenswrapper[4711]: I0123 08:21:30.978761 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:30Z","lastTransitionTime":"2026-01-23T08:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.081874 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.081969 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.081987 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.082012 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.082029 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:31Z","lastTransitionTime":"2026-01-23T08:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.127871 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:31Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.145754 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:31Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.173032 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677bbe8d1690ae3e49caee53eab8ca7d0c7cf538
13f44d3c4d96260363dbb073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4870a9c1f150bed3eab9a6f07734e43811b7a7abf960f239717c78ab6b41277e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"message\\\":\\\"3897 6305 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-2t9r8\\\\nI0123 08:21:03.863918 6305 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-2t9r8 in node crc\\\\nI0123 08:21:03.863927 6305 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-2t9r8 after 0 failed attempt(s)\\\\nI0123 08:21:03.863933 6305 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-2t9r8\\\\nI0123 08:21:03.863932 6305 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI0123 08:21:03.863948 6305 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI0123 08:21:03.863953 6305 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-bkn9c\\\\nI0123 08:21:03.863962 6305 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-bkn9c in node crc\\\\nI0123 08:21:03.863967 6305 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-bkn9c after 0 failed attempt(s)\\\\nI0123 08:21:03.863974 6305 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-bk\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:21:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677bbe8d1690ae3e49caee53eab8ca7d0c7cf53813f44d3c4d96260363dbb073\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:30Z\\\",\\\"message\\\":\\\"2025-02-23 05:35:30 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[ingress.openshift.io/canary:canary_controller] map[service.beta.openshift.io/serving-cert-secret-name:canary-serving-cert] [{apps/v1 daemonset ingress-canary f5a2759b-dc3c-483d-93f0-055bac962b12 0xc00793cad7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:8443-tcp,Protocol:TCP,Port:8443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},ServicePort{Name:8888-tcp,Protocol:TCP,Port:8888,TargetPort:{0 8888 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{ingresscanary.operator.openshift.io/daemonset-ingresscanary: canary_controller,},ClusterIP:10.217.5.34,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.34],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0123 08:21:30.021859 6669 
lb_config.go:1031] Cluster endpoints for openshift-ingress-canary/ingress-canary for network=default are: map[]\\\\nI0123 08:21:30.021872 6669 services_controller.go:443] Built service open\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:21:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"c
ontainerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:31Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.184259 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.184302 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.184314 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.184332 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.184345 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:31Z","lastTransitionTime":"2026-01-23T08:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.186457 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:31Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.199888 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2bbf296-ae82-4cc3-b07d-bba10895a545\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zv6rd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:31Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.215920 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:31Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.231117 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:31Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.231372 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 17:19:36.740228114 +0000 UTC Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.249825 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:31Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.264876 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eafae4831c9cebb04dc1fe0259fa32717a44734f39508d8cd162ae212d1429fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:28Z\\\",\\\"message\\\":\\\"2026-01-23T08:20:41+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_87fc06b8-1c5e-421c-9143-8a6997e4aa37\\\\n2026-01-23T08:20:41+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_87fc06b8-1c5e-421c-9143-8a6997e4aa37 to /host/opt/cni/bin/\\\\n2026-01-23T08:20:41Z [verbose] multus-daemon started\\\\n2026-01-23T08:20:41Z [verbose] Readiness Indicator file check\\\\n2026-01-23T08:21:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:21:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:31Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.277038 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0caf5b03-2802-4381-a65b-2992843d72f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82a1773b9c61c3f43909d5cb8f620a2895f394d7f9edac8e064a4a54c75d0ca2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0701a83849ad2ad78eaf4f7b3be208d5ddd62c30472085fb71fc1229ba9dd51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d00094f52482a5875f5386a0f3d403753adc0df59abb0b433ca8b449779a594\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:31Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.286990 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.287269 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.287344 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.287428 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.287515 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:31Z","lastTransitionTime":"2026-01-23T08:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.291890 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:31Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.304224 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc8b51d9-cfd3-4da4-a51e-0f9656820731\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d539e656e760657b2820618c7f91f0d8d209f3833c28493e956b337fe15419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acad0227e70c5133aa85caa5c169ec7e78b11dc1efaa35ed5e85904f6275b9a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qh4pc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:31Z is after 2025-08-24T17:21:41Z" Jan 23 
08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.319160 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:31Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.337731 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:31Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.363240 4711 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:31Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.380598 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:31Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.390731 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.390936 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.391080 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.391347 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.391567 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:31Z","lastTransitionTime":"2026-01-23T08:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.396204 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:31Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.413921 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:31Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.425043 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a63253b-ad88-471e-a8c2-9ea74aa20b29\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05ca21b49a07b82a6e6767f1839594c7ead4801d43541cce740a12b11dca6f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bc864465d2dc98d051395f369a6ca2628f21b5f91ec4fa7f8432e1b28aa5e93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3bc864465d2dc98d051395f369a6ca2628f21b5f91ec4fa7f8432e1b28aa5e93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:31Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.473638 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.473706 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 23 08:21:31 crc kubenswrapper[4711]: E0123 08:21:31.473804 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.473831 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:21:31 crc kubenswrapper[4711]: E0123 08:21:31.473855 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 23 08:21:31 crc kubenswrapper[4711]: E0123 08:21:31.474017 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545"
Jan 23 08:21:31 crc kubenswrapper[4711]: I0123 08:21:31.474381 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 23 08:21:31 crc kubenswrapper[4711]: E0123 08:21:31.474607 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[The five-line node-status block logged above at 08:21:31.390731 through 08:21:31.391567 (kubelet_node_status.go:724 events NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, and NodeNotReady, followed by the setters.go:603 "Node became not ready" condition with the same KubeletNotReady / NetworkPluginNotReady message) repeats unchanged except for its timestamps at 08:21:31.493979, 08:21:31.596926, 08:21:31.700347, 08:21:31.803684, 08:21:31.907268, 08:21:32.011351, and 08:21:32.113779.]
Jan 23 08:21:32 crc kubenswrapper[4711]: I0123 08:21:32.117958 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jmffw_e16bfd0e-30fd-4fcf-865b-63400b88cff3/ovnkube-controller/3.log"
[The same five-line node-status block repeats at 08:21:32.217376.]
Jan 23 08:21:32 crc kubenswrapper[4711]: I0123 08:21:32.231980 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 07:57:18.342449096 +0000 UTC
[The same five-line node-status block repeats unchanged except for its timestamps at 08:21:32.319982, 08:21:32.422666, 08:21:32.526031, 08:21:32.628902, 08:21:32.732481, 08:21:32.835186, 08:21:32.938943, 08:21:33.042425, and 08:21:33.145357; the final occurrence is cut off mid-message at the end of the captured log.]
Has your network provider started?"} Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.232410 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 22:39:06.361324243 +0000 UTC Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.249301 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.249358 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.249370 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.249394 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.249407 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:33Z","lastTransitionTime":"2026-01-23T08:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.352636 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.352685 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.352701 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.352725 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.352742 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:33Z","lastTransitionTime":"2026-01-23T08:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.457187 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.457238 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.457250 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.457270 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.457284 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:33Z","lastTransitionTime":"2026-01-23T08:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.473603 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:33 crc kubenswrapper[4711]: E0123 08:21:33.473788 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.473630 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:33 crc kubenswrapper[4711]: E0123 08:21:33.473893 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.473892 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:21:33 crc kubenswrapper[4711]: E0123 08:21:33.474078 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.473603 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:33 crc kubenswrapper[4711]: E0123 08:21:33.474264 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.560488 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.560545 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.560556 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.560574 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.560586 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:33Z","lastTransitionTime":"2026-01-23T08:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.590259 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.590325 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.590338 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.590359 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.590375 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:33Z","lastTransitionTime":"2026-01-23T08:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:33 crc kubenswrapper[4711]: E0123 08:21:33.609474 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:33Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.614373 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.614445 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.614468 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.614534 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.614560 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:33Z","lastTransitionTime":"2026-01-23T08:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:33 crc kubenswrapper[4711]: E0123 08:21:33.633540 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:33Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.647877 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.647937 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.647960 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.647990 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.648014 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:33Z","lastTransitionTime":"2026-01-23T08:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:33 crc kubenswrapper[4711]: E0123 08:21:33.668353 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:33Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.672120 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.672158 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.672173 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.672191 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.672203 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:33Z","lastTransitionTime":"2026-01-23T08:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:33 crc kubenswrapper[4711]: E0123 08:21:33.684999 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:33Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.688794 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.688821 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.688831 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.688845 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.688854 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:33Z","lastTransitionTime":"2026-01-23T08:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:33 crc kubenswrapper[4711]: E0123 08:21:33.700406 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:33Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:33 crc kubenswrapper[4711]: E0123 08:21:33.700543 4711 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.702209 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.702238 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.702251 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.702267 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.702278 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:33Z","lastTransitionTime":"2026-01-23T08:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.804461 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.804546 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.804572 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.804601 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.804619 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:33Z","lastTransitionTime":"2026-01-23T08:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.906857 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.906899 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.906909 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.906925 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:33 crc kubenswrapper[4711]: I0123 08:21:33.906937 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:33Z","lastTransitionTime":"2026-01-23T08:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.010361 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.010415 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.010434 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.010456 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.010470 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:34Z","lastTransitionTime":"2026-01-23T08:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.112792 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.112840 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.112852 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.112870 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.112885 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:34Z","lastTransitionTime":"2026-01-23T08:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.216341 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.216809 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.216943 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.217037 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.217108 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:34Z","lastTransitionTime":"2026-01-23T08:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.234095 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 21:43:07.216002362 +0000 UTC Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.319690 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.319769 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.319779 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.319798 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.319811 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:34Z","lastTransitionTime":"2026-01-23T08:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.422919 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.422971 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.422983 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.423006 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.423020 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:34Z","lastTransitionTime":"2026-01-23T08:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.526919 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.526959 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.526968 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.526986 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.526998 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:34Z","lastTransitionTime":"2026-01-23T08:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.629660 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.629712 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.629722 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.629741 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.629756 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:34Z","lastTransitionTime":"2026-01-23T08:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.731834 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.731881 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.731893 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.731911 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.731923 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:34Z","lastTransitionTime":"2026-01-23T08:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.834679 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.834734 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.834751 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.834773 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.834786 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:34Z","lastTransitionTime":"2026-01-23T08:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.938004 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.938043 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.938054 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.938071 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:34 crc kubenswrapper[4711]: I0123 08:21:34.938082 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:34Z","lastTransitionTime":"2026-01-23T08:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.041212 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.041262 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.041279 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.041306 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.041319 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:35Z","lastTransitionTime":"2026-01-23T08:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.143621 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.143660 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.143669 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.143684 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.143695 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:35Z","lastTransitionTime":"2026-01-23T08:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.234613 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 23:24:30.06913257 +0000 UTC Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.248034 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.248119 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.248144 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.248183 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.248209 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:35Z","lastTransitionTime":"2026-01-23T08:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.351855 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.352163 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.352176 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.352194 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.352207 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:35Z","lastTransitionTime":"2026-01-23T08:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.454783 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.454831 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.454845 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.454869 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.454887 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:35Z","lastTransitionTime":"2026-01-23T08:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.473147 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.473202 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:35 crc kubenswrapper[4711]: E0123 08:21:35.473315 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.473343 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:21:35 crc kubenswrapper[4711]: E0123 08:21:35.473467 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:21:35 crc kubenswrapper[4711]: E0123 08:21:35.473556 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.473707 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:35 crc kubenswrapper[4711]: E0123 08:21:35.473792 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.488891 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2bbf296-ae82-4cc3-b07d-bba10895a545\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zv6rd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:35Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.510373 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:35Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.523547 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:35Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.537705 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:35Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.552239 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:35Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.557695 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.557730 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.557738 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.557754 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.557765 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:35Z","lastTransitionTime":"2026-01-23T08:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.569808 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677bbe8d1690ae3e49caee53eab8ca7d0c7cf53813f44d3c4d96260363dbb073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4870a9c1f150bed3eab9a6f07734e43811b7a7abf960f239717c78ab6b41277e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"message\\\":\\\"3897 6305 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-2t9r8\\\\nI0123 08:21:03.863918 6305 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-2t9r8 in node crc\\\\nI0123 08:21:03.863927 6305 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-2t9r8 after 0 failed attempt(s)\\\\nI0123 08:21:03.863933 6305 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-2t9r8\\\\nI0123 08:21:03.863932 6305 default_network_controller.go:776] Recording success event on pod openshift-etcd/etcd-crc\\\\nI0123 08:21:03.863948 6305 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI0123 08:21:03.863953 6305 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-bkn9c\\\\nI0123 08:21:03.863962 6305 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-bkn9c in node crc\\\\nI0123 08:21:03.863967 6305 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-bkn9c after 0 failed attempt(s)\\\\nI0123 08:21:03.863974 6305 default_network_controller.go:776] Recording success event on pod 
openshift-image-registry/node-ca-bk\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:21:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677bbe8d1690ae3e49caee53eab8ca7d0c7cf53813f44d3c4d96260363dbb073\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:30Z\\\",\\\"message\\\":\\\"2025-02-23 05:35:30 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[ingress.openshift.io/canary:canary_controller] map[service.beta.openshift.io/serving-cert-secret-name:canary-serving-cert] [{apps/v1 daemonset ingress-canary f5a2759b-dc3c-483d-93f0-055bac962b12 0xc00793cad7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:8443-tcp,Protocol:TCP,Port:8443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},ServicePort{Name:8888-tcp,Protocol:TCP,Port:8888,TargetPort:{0 8888 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{ingresscanary.operator.openshift.io/daemonset-ingresscanary: canary_controller,},ClusterIP:10.217.5.34,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.34],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0123 08:21:30.021859 6669 lb_config.go:1031] Cluster endpoints for openshift-ingress-canary/ingress-canary for network=default are: map[]\\\\nI0123 08:21:30.021872 6669 services_controller.go:443] Built service 
open\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:21:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d
2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:35Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.580764 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:35Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.595919 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0caf5b03-2802-4381-a65b-2992843d72f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82a1773b9c61c3f43909d5cb8f620a2895f394d7f9edac8e064a4a54c75d0ca2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0701a83849ad2ad78eaf4f7b3be208d5ddd62c30472085fb71fc1229ba9dd51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d00094f52482a5875f5386a0f3d403753adc0df59abb0b433ca8b449779a594\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7d
bf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:35Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.608500 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:35Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.623960 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:35Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.641244 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eafae4831c9cebb04dc1fe0259fa32717a44734f39508d8cd162ae212d1429fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:28Z\\\",\\\"message\\\":\\\"2026-01-23T08:20:41+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_87fc06b8-1c5e-421c-9143-8a6997e4aa37\\\\n2026-01-23T08:20:41+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_87fc06b8-1c5e-421c-9143-8a6997e4aa37 to /host/opt/cni/bin/\\\\n2026-01-23T08:20:41Z [verbose] multus-daemon started\\\\n2026-01-23T08:20:41Z [verbose] Readiness Indicator file check\\\\n2026-01-23T08:21:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:21:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:35Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.652852 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:35Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.660671 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.660731 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.660744 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.660765 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.660781 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:35Z","lastTransitionTime":"2026-01-23T08:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.666965 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:35Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.679855 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc8b51d9-cfd3-4da4-a51e-0f9656820731\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d539e656e760657b2820618c7f91f0d8d209f3833c28493e956b337fe15419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acad0227e70c5133aa85caa5c169ec7e78b11dc1efaa35ed5e85904f6275b9a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\
\"2026-01-23T08:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qh4pc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:35Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.692547 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ff
ac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:35Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.702615 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a63253b-ad88-471e-a8c2-9ea74aa20b29\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05ca21b49a07b82a6e6767f1839594c7ead4801d43541cce740a12b11dca6f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bc864465d2dc98d051395f369a6ca2628f21b5f91ec4fa7f8432e1b28aa5e93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3bc864465d2dc98d051395f369a6ca2628f21b5f91ec4fa7f8432e1b28aa5e93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:35Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.720201 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963
219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:35Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.733446 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:35Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.747628 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:35Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.763080 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.763134 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.763147 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.763168 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:35 crc 
kubenswrapper[4711]: I0123 08:21:35.763181 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:35Z","lastTransitionTime":"2026-01-23T08:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.865285 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.865340 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.865354 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.865374 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.865392 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:35Z","lastTransitionTime":"2026-01-23T08:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.968395 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.968460 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.968472 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.968493 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:35 crc kubenswrapper[4711]: I0123 08:21:35.968528 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:35Z","lastTransitionTime":"2026-01-23T08:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.072132 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.072200 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.072213 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.072230 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.072243 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:36Z","lastTransitionTime":"2026-01-23T08:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.175190 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.175245 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.175258 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.175277 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.175291 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:36Z","lastTransitionTime":"2026-01-23T08:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.235782 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 17:51:27.37891677 +0000 UTC Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.278071 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.278132 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.278142 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.278163 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.278177 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:36Z","lastTransitionTime":"2026-01-23T08:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.381023 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.381070 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.381079 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.381096 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.381110 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:36Z","lastTransitionTime":"2026-01-23T08:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.487757 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.487808 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.487818 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.487839 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.487850 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:36Z","lastTransitionTime":"2026-01-23T08:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.590205 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.590266 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.590279 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.590300 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.590317 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:36Z","lastTransitionTime":"2026-01-23T08:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.692735 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.692796 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.692806 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.692827 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.692838 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:36Z","lastTransitionTime":"2026-01-23T08:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.795552 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.795671 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.795692 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.795725 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.795741 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:36Z","lastTransitionTime":"2026-01-23T08:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.898438 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.898544 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.898559 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.898579 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:36 crc kubenswrapper[4711]: I0123 08:21:36.898590 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:36Z","lastTransitionTime":"2026-01-23T08:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.001080 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.001140 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.001156 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.001178 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.001190 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:37Z","lastTransitionTime":"2026-01-23T08:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.104030 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.104091 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.104109 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.104130 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.104143 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:37Z","lastTransitionTime":"2026-01-23T08:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.207627 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.207688 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.207705 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.207724 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.207734 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:37Z","lastTransitionTime":"2026-01-23T08:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.236104 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 19:09:34.42584945 +0000 UTC Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.310192 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.310252 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.310265 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.310282 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.310293 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:37Z","lastTransitionTime":"2026-01-23T08:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.413258 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.413333 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.413343 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.413362 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.413373 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:37Z","lastTransitionTime":"2026-01-23T08:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.473209 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.473209 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.473335 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.473396 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:21:37 crc kubenswrapper[4711]: E0123 08:21:37.473607 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:21:37 crc kubenswrapper[4711]: E0123 08:21:37.473833 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:21:37 crc kubenswrapper[4711]: E0123 08:21:37.473888 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:21:37 crc kubenswrapper[4711]: E0123 08:21:37.473961 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.515999 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.516049 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.516058 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.516076 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.516092 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:37Z","lastTransitionTime":"2026-01-23T08:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.618787 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.618836 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.618852 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.618872 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.618884 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:37Z","lastTransitionTime":"2026-01-23T08:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.721174 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.721233 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.721247 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.721267 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.721280 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:37Z","lastTransitionTime":"2026-01-23T08:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.824239 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.824298 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.824310 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.824336 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.824350 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:37Z","lastTransitionTime":"2026-01-23T08:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.927154 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.927231 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.927243 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.927260 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:37 crc kubenswrapper[4711]: I0123 08:21:37.927270 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:37Z","lastTransitionTime":"2026-01-23T08:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.030756 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.030849 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.030868 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.030903 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.030936 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:38Z","lastTransitionTime":"2026-01-23T08:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.134574 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.134641 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.134654 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.134675 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.134688 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:38Z","lastTransitionTime":"2026-01-23T08:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.236435 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 10:28:54.49035233 +0000 UTC Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.236943 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.236975 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.236986 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.237001 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.237012 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:38Z","lastTransitionTime":"2026-01-23T08:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.340591 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.340645 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.340656 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.340674 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.340686 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:38Z","lastTransitionTime":"2026-01-23T08:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.442674 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.442714 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.442735 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.442754 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.442767 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:38Z","lastTransitionTime":"2026-01-23T08:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.545716 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.545764 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.545776 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.545798 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.545810 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:38Z","lastTransitionTime":"2026-01-23T08:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.648334 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.648377 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.648389 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.648405 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.648415 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:38Z","lastTransitionTime":"2026-01-23T08:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.750223 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.750277 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.750305 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.750329 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.750345 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:38Z","lastTransitionTime":"2026-01-23T08:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.853256 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.853312 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.853346 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.853367 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.853386 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:38Z","lastTransitionTime":"2026-01-23T08:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.956814 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.956902 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.956943 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.956979 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:38 crc kubenswrapper[4711]: I0123 08:21:38.957003 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:38Z","lastTransitionTime":"2026-01-23T08:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.060160 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.060214 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.060231 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.060252 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.060268 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:39Z","lastTransitionTime":"2026-01-23T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.162931 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.162969 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.162980 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.163001 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.163014 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:39Z","lastTransitionTime":"2026-01-23T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.237547 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 12:23:01.079393575 +0000 UTC Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.266075 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.266138 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.266151 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.266174 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.266187 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:39Z","lastTransitionTime":"2026-01-23T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.369355 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.369412 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.369424 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.369441 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.369452 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:39Z","lastTransitionTime":"2026-01-23T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.472563 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.472604 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.472615 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.472631 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.472643 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:39Z","lastTransitionTime":"2026-01-23T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.472690 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.472805 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 23 08:21:39 crc kubenswrapper[4711]: E0123 08:21:39.472840 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.472864 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 23 08:21:39 crc kubenswrapper[4711]: E0123 08:21:39.472985 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 23 08:21:39 crc kubenswrapper[4711]: E0123 08:21:39.473070 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.473216 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 23 08:21:39 crc kubenswrapper[4711]: E0123 08:21:39.473290 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.575735 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.575795 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.575806 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.575830 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.575854 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:39Z","lastTransitionTime":"2026-01-23T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.679016 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.679097 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.679123 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.679153 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.679179 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:39Z","lastTransitionTime":"2026-01-23T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.781581 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.781630 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.781641 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.781658 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.781673 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:39Z","lastTransitionTime":"2026-01-23T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.883487 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.883548 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.883557 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.883572 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.883581 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:39Z","lastTransitionTime":"2026-01-23T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.982560 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:21:39 crc kubenswrapper[4711]: E0123 08:21:39.982839 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:43.982806529 +0000 UTC m=+149.555762897 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.986219 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.986275 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.986286 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.986304 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:39 crc kubenswrapper[4711]: I0123 08:21:39.986317 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:39Z","lastTransitionTime":"2026-01-23T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.083278 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.083345 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.083380 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.083410 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 23 08:21:40 crc kubenswrapper[4711]: E0123 08:21:40.083533 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 23 08:21:40 crc kubenswrapper[4711]: E0123 08:21:40.083562 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 23 08:21:40 crc kubenswrapper[4711]: E0123 08:21:40.083579 4711 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 23 08:21:40 crc kubenswrapper[4711]: E0123 08:21:40.083594 4711 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 23 08:21:40 crc kubenswrapper[4711]: E0123 08:21:40.083623 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 23 08:21:40 crc kubenswrapper[4711]: E0123 08:21:40.083669 4711 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Jan 23 08:21:40 crc kubenswrapper[4711]: E0123 08:21:40.083678 4711 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 23 08:21:40 crc kubenswrapper[4711]: E0123 08:21:40.083702 4711 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 23 08:21:40 crc kubenswrapper[4711]: E0123 08:21:40.083646 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-23 08:22:44.083624682 +0000 UTC m=+149.656581050 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 23 08:21:40 crc kubenswrapper[4711]: E0123 08:21:40.083809 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-23 08:22:44.083786256 +0000 UTC m=+149.656742624 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 23 08:21:40 crc kubenswrapper[4711]: E0123 08:21:40.083827 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-23 08:22:44.083816576 +0000 UTC m=+149.656772944 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Jan 23 08:21:40 crc kubenswrapper[4711]: E0123 08:21:40.083841 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-23 08:22:44.083834037 +0000 UTC m=+149.656790515 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
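The nestedpendingoperations.go:348 entries above defer each failed volume operation with "No retries permitted until ... (durationBeforeRetry 1m4s)". That 1m4s figure is consistent with a per-operation exponential backoff that starts near 500ms and doubles on each failure up to a cap (0.5s x 2^7 = 64s = 1m4s). A minimal Go sketch of such a schedule, assuming those initial and cap values, which the log itself does not state:

package main

import (
	"fmt"
	"time"
)

func main() {
	delay := 500 * time.Millisecond            // assumed initial delay
	maxDelay := 2*time.Minute + 2*time.Second // assumed cap on the backoff
	for attempt := 1; attempt <= 9; attempt++ {
		// Print the wait before the next retry of this volume operation.
		fmt.Printf("attempt %d: next retry in %v\n", attempt, delay)
		delay *= 2 // double on every consecutive failure
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}

Under these assumptions the eighth consecutive failure prints "next retry in 1m4s", matching the durationBeforeRetry seen above.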
Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.088975 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.089010 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.089020 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.089036 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.089078 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:40Z","lastTransitionTime":"2026-01-23T08:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.191797 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.191832 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.191840 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.191858 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.191868 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:40Z","lastTransitionTime":"2026-01-23T08:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.238678 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 17:29:43.978903395 +0000 UTC
Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.294602 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.294676 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.294689 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.294714 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.294727 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:40Z","lastTransitionTime":"2026-01-23T08:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.397689 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.397752 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.397769 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.397798 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.397817 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:40Z","lastTransitionTime":"2026-01-23T08:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.500857 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.500907 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.500919 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.500941 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.500954 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:40Z","lastTransitionTime":"2026-01-23T08:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.602782 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.602819 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.602828 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.602846 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.602855 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:40Z","lastTransitionTime":"2026-01-23T08:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.707041 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.707098 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.707107 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.707124 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.707136 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:40Z","lastTransitionTime":"2026-01-23T08:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.808894 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.808940 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.808950 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.808966 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.808976 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:40Z","lastTransitionTime":"2026-01-23T08:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.911358 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.911394 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.911403 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.911419 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:40 crc kubenswrapper[4711]: I0123 08:21:40.911430 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:40Z","lastTransitionTime":"2026-01-23T08:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.014046 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.014099 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.014111 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.014129 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.014141 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:41Z","lastTransitionTime":"2026-01-23T08:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.117345 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.117407 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.117420 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.117444 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.117458 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:41Z","lastTransitionTime":"2026-01-23T08:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.219738 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.219804 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.219817 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.219882 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.219910 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:41Z","lastTransitionTime":"2026-01-23T08:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.238893 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 14:52:06.227426854 +0000 UTC
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.322500 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.322598 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.322611 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.322633 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.322646 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:41Z","lastTransitionTime":"2026-01-23T08:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.425930 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.425988 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.425997 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.426015 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.426028 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:41Z","lastTransitionTime":"2026-01-23T08:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.473709 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.473789 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 23 08:21:41 crc kubenswrapper[4711]: E0123 08:21:41.473883 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.474040 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.474105 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 23 08:21:41 crc kubenswrapper[4711]: E0123 08:21:41.474162 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 23 08:21:41 crc kubenswrapper[4711]: E0123 08:21:41.473997 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 23 08:21:41 crc kubenswrapper[4711]: E0123 08:21:41.474525 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.529102 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.529145 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.529155 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.529170 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.529180 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:41Z","lastTransitionTime":"2026-01-23T08:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.631890 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.631934 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.631952 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.631977 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.632015 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:41Z","lastTransitionTime":"2026-01-23T08:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.734672 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.734732 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.734745 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.734771 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.734787 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:41Z","lastTransitionTime":"2026-01-23T08:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.837776 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.837853 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.837875 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.837908 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.837930 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:41Z","lastTransitionTime":"2026-01-23T08:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.940849 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.940902 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.940922 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.940947 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:41 crc kubenswrapper[4711]: I0123 08:21:41.940962 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:41Z","lastTransitionTime":"2026-01-23T08:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.043540 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.043575 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.043585 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.043600 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.043613 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:42Z","lastTransitionTime":"2026-01-23T08:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.146266 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.146343 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.146356 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.146373 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.146382 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:42Z","lastTransitionTime":"2026-01-23T08:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
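For reference, the condition printed by setters.go:603 above serializes from the v1.NodeCondition type. A hypothetical reconstruction that marshals to the same shape (timestamps here are whatever metav1.Now() returns, not the logged ones):

package main

import (
	"encoding/json"
	"fmt"

	v1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	// Hypothetical reconstruction of the condition logged by setters.go:603.
	cond := v1.NodeCondition{
		Type:               v1.NodeReady,
		Status:             v1.ConditionFalse,
		LastHeartbeatTime:  metav1.Now(),
		LastTransitionTime: metav1.Now(),
		Reason:             "KubeletNotReady",
		Message: "container runtime network not ready: NetworkReady=false " +
			"reason:NetworkPluginNotReady message:Network plugin returns error: " +
			"no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?",
	}
	b, err := json.Marshal(cond)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // same shape as condition={...} in the entries above
}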
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.239534 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 05:41:39.500517226 +0000 UTC
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.248640 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.248687 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.248702 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.248725 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.248741 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:42Z","lastTransitionTime":"2026-01-23T08:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.350857 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.350907 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.350917 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.350935 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.350949 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:42Z","lastTransitionTime":"2026-01-23T08:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
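The certificate_manager.go line above reports the serving certificate's expiry and its computed rotation deadline. A small sketch for checking the same validity window by hand, assuming the kubelet's current serving certificate lives at the conventional /var/lib/kubelet/pki path (an assumption, not taken from this log):

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Assumed path: the kubelet's current serving certificate on the node.
	data, err := os.ReadFile("/var/lib/kubelet/pki/kubelet-server-current.pem")
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}
	fmt.Println("NotBefore:", cert.NotBefore)
	fmt.Println("NotAfter: ", cert.NotAfter)
	fmt.Println("time left:", time.Until(cert.NotAfter))
}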
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.454147 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.454207 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.454220 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.454240 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.454254 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:42Z","lastTransitionTime":"2026-01-23T08:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.557573 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.557722 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.557737 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.557759 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.557782 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:42Z","lastTransitionTime":"2026-01-23T08:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.660848 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.661497 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.661627 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.661744 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.661808 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:42Z","lastTransitionTime":"2026-01-23T08:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.764922 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.764971 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.764984 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.765004 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.765017 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:42Z","lastTransitionTime":"2026-01-23T08:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.868296 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.868345 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.868359 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.868379 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.868392 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:42Z","lastTransitionTime":"2026-01-23T08:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.970758 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.970807 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.970817 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.970834 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:42 crc kubenswrapper[4711]: I0123 08:21:42.970848 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:42Z","lastTransitionTime":"2026-01-23T08:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.074053 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.074091 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.074100 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.074115 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.074125 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:43Z","lastTransitionTime":"2026-01-23T08:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.177031 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.177074 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.177083 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.177098 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.177112 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:43Z","lastTransitionTime":"2026-01-23T08:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
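Each of these status loops ends with the kubelet patching the node's status subresource; the "failed to patch status" errors further below show the strategic-merge payload it sends (the $setElementOrder/conditions directive keys list entries by type). A sketch of the same kind of patch issued through client-go; the kubeconfig path is a placeholder and the condition body is illustrative:

package main

import (
	"context"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/types"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // placeholder path
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// Illustrative condition body; strategic merge matches conditions by "type".
	patch := []byte(`{"status":{"conditions":[{"type":"Ready","status":"False","reason":"KubeletNotReady"}]}}`)
	if _, err := cs.CoreV1().Nodes().Patch(context.TODO(), "crc",
		types.StrategicMergePatchType, patch, metav1.PatchOptions{}, "status"); err != nil {
		panic(err) // in this log the failure surfaces here, rejected by an admission webhook
	}
}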
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.240329 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 18:21:52.095266609 +0000 UTC
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.280219 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.280279 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.280289 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.280307 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.280321 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:43Z","lastTransitionTime":"2026-01-23T08:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.383368 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.383420 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.383430 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.383447 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.383458 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:43Z","lastTransitionTime":"2026-01-23T08:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.473214 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.473277 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.473230 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 23 08:21:43 crc kubenswrapper[4711]: E0123 08:21:43.473433 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545"
Jan 23 08:21:43 crc kubenswrapper[4711]: E0123 08:21:43.473577 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 23 08:21:43 crc kubenswrapper[4711]: E0123 08:21:43.473675 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.473735 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 23 08:21:43 crc kubenswrapper[4711]: E0123 08:21:43.473796 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.486050 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.486121 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.486158 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.486173 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.486184 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:43Z","lastTransitionTime":"2026-01-23T08:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
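Every failure in this section reduces to the same root cause named in the messages: no CNI configuration file in /etc/kubernetes/cni/net.d/. A sketch of that readiness check at its simplest, scanning the directory (named verbatim in the log) for the standard CNI config extensions:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// Directory named verbatim in the log messages above.
	dir := "/etc/kubernetes/cni/net.d"
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("cannot read", dir+":", err)
		return
	}
	found := false
	for _, e := range entries {
		// Standard CNI config file extensions.
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Println("found CNI config:", e.Name())
			found = true
		}
	}
	if !found {
		fmt.Println("no CNI configuration files; this is the state the kubelet is reporting")
	}
}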
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.589240 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.589286 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.589299 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.589319 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.589331 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:43Z","lastTransitionTime":"2026-01-23T08:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.692488 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.692555 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.692568 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.692587 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.692600 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:43Z","lastTransitionTime":"2026-01-23T08:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.795140 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.795449 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.795586 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.795693 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.795775 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:43Z","lastTransitionTime":"2026-01-23T08:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
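The patch attempts recorded below never reach storage: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a certificate that expired on 2025-08-24T17:21:41Z. A sketch that inspects the presented certificate's validity window directly; InsecureSkipVerify is deliberate here so the handshake completes even with the expired certificate:

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Endpoint taken from the webhook errors below; skip verification so the
	// handshake succeeds and we can read the certificate that is presented.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		panic(err)
	}
	defer conn.Close()
	cert := conn.ConnectionState().PeerCertificates[0]
	now := time.Now()
	fmt.Println("NotBefore:", cert.NotBefore)
	fmt.Println("NotAfter: ", cert.NotAfter)
	fmt.Println("valid now:", now.After(cert.NotBefore) && now.Before(cert.NotAfter))
}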
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.898167 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.898227 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.898240 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.898262 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.898276 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:43Z","lastTransitionTime":"2026-01-23T08:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.995692 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.995732 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.995742 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.995759 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:43 crc kubenswrapper[4711]: I0123 08:21:43.995769 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:43Z","lastTransitionTime":"2026-01-23T08:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:44 crc kubenswrapper[4711]: E0123 08:21:44.008872 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:43Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:43Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:43Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:43Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 
2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.014029 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.014101 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.014115 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.014136 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.014150 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:44Z","lastTransitionTime":"2026-01-23T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:44 crc kubenswrapper[4711]: E0123 08:21:44.029059 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 
2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.032910 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.032954 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.032964 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.032981 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.032992 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:44Z","lastTransitionTime":"2026-01-23T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:44 crc kubenswrapper[4711]: E0123 08:21:44.047786 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 
2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.052186 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.052228 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.052239 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.052256 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.052266 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:44Z","lastTransitionTime":"2026-01-23T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:44 crc kubenswrapper[4711]: E0123 08:21:44.063418 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 
2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.068358 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.068412 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.068428 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.068448 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.068461 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:44Z","lastTransitionTime":"2026-01-23T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:44 crc kubenswrapper[4711]: E0123 08:21:44.081129 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 
2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: E0123 08:21:44.081255 4711 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.083463 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.083541 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.083554 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.083572 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.083584 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:44Z","lastTransitionTime":"2026-01-23T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.185856 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.185905 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.185917 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.185938 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.185953 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:44Z","lastTransitionTime":"2026-01-23T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.240673 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 01:50:44.146814237 +0000 UTC Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.289197 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.289253 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.289268 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.289288 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.289302 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:44Z","lastTransitionTime":"2026-01-23T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.391565 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.391636 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.391646 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.391665 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.391699 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:44Z","lastTransitionTime":"2026-01-23T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.474161 4711 scope.go:117] "RemoveContainer" containerID="677bbe8d1690ae3e49caee53eab8ca7d0c7cf53813f44d3c4d96260363dbb073" Jan 23 08:21:44 crc kubenswrapper[4711]: E0123 08:21:44.474378 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-jmffw_openshift-ovn-kubernetes(e16bfd0e-30fd-4fcf-865b-63400b88cff3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.488238 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.494737 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.494825 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.494838 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:44 crc kubenswrapper[4711]: 
I0123 08:21:44.494855 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.494867 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:44Z","lastTransitionTime":"2026-01-23T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.500836 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.512760 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.524182 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a63253b-ad88-471e-a8c2-9ea74aa20b29\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05ca21b49a07b82a6e6767f1839594c7ead4801d43541cce740a12b11dca6f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bc864465d2dc98d051395f369a6ca2628f21b5f91ec4fa7f8432e1b28aa5e93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3bc864465d2dc98d051395f369a6ca2628f21b5f91ec4fa7f8432e1b28aa5e93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.547376 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963
219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.561134 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.583324 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677bbe8d1690ae3e49caee53eab8ca7d0c7cf538
13f44d3c4d96260363dbb073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677bbe8d1690ae3e49caee53eab8ca7d0c7cf53813f44d3c4d96260363dbb073\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:30Z\\\",\\\"message\\\":\\\"2025-02-23 05:35:30 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[ingress.openshift.io/canary:canary_controller] map[service.beta.openshift.io/serving-cert-secret-name:canary-serving-cert] [{apps/v1 daemonset ingress-canary f5a2759b-dc3c-483d-93f0-055bac962b12 0xc00793cad7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:8443-tcp,Protocol:TCP,Port:8443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},ServicePort{Name:8888-tcp,Protocol:TCP,Port:8888,TargetPort:{0 8888 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{ingresscanary.operator.openshift.io/daemonset-ingresscanary: canary_controller,},ClusterIP:10.217.5.34,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.34],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0123 08:21:30.021859 6669 lb_config.go:1031] Cluster endpoints for openshift-ingress-canary/ingress-canary for network=default are: map[]\\\\nI0123 08:21:30.021872 6669 services_controller.go:443] Built service open\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:21:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-jmffw_openshift-ovn-kubernetes(e16bfd0e-30fd-4fcf-865b-63400b88cff3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.596802 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.597271 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.597390 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.597524 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.597782 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.598086 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:44Z","lastTransitionTime":"2026-01-23T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.609347 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2bbf296-ae82-4cc3-b07d-bba10895a545\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zv6rd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.625728 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.640193 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.657485 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.674330 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eafae4831c9cebb04dc1fe0259fa32717a44734f39508d8cd162ae212d1429fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:28Z\\\",\\\"message\\\":\\\"2026-01-23T08:20:41+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_87fc06b8-1c5e-421c-9143-8a6997e4aa37\\\\n2026-01-23T08:20:41+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_87fc06b8-1c5e-421c-9143-8a6997e4aa37 to /host/opt/cni/bin/\\\\n2026-01-23T08:20:41Z [verbose] multus-daemon started\\\\n2026-01-23T08:20:41Z [verbose] Readiness Indicator file check\\\\n2026-01-23T08:21:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:21:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.685211 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0caf5b03-2802-4381-a65b-2992843d72f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82a1773b9c61c3f43909d5cb8f620a2895f394d7f9edac8e064a4a54c75d0ca2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0701a83849ad2ad78eaf4f7b3be208d5ddd62c30472085fb71fc1229ba9dd51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d00094f52482a5875f5386a0f3d403753adc0df59abb0b433ca8b449779a594\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.697857 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 2025-08-24T17:21:41Z" Jan 23 
08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.700402 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.700451 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.700468 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.700528 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.700564 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:44Z","lastTransitionTime":"2026-01-23T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.713660 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"q
uay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readO
nly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\
\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.724919 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.736873 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.749223 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc8b51d9-cfd3-4da4-a51e-0f9656820731\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d539e656e760657b2820618c7f91f0d8d209f3833c28493e956b337fe15419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acad0227e70c5133aa85caa5c169ec7e78b11dc1efaa35ed5e85904f6275b9a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qh4pc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:44Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.803748 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.803811 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.803824 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.803842 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.803857 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:44Z","lastTransitionTime":"2026-01-23T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.906527 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.906585 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.906595 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.906613 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:44 crc kubenswrapper[4711]: I0123 08:21:44.906656 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:44Z","lastTransitionTime":"2026-01-23T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.009615 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.009684 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.009703 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.009727 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.009747 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:45Z","lastTransitionTime":"2026-01-23T08:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.113453 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.113501 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.113546 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.113566 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.113581 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:45Z","lastTransitionTime":"2026-01-23T08:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.215943 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.216211 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.216561 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.216782 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.216874 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:45Z","lastTransitionTime":"2026-01-23T08:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.241182 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 14:32:08.338027233 +0000 UTC Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.320236 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.320791 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.320810 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.320829 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.320850 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:45Z","lastTransitionTime":"2026-01-23T08:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.424045 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.424095 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.424103 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.424119 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.424130 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:45Z","lastTransitionTime":"2026-01-23T08:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.473637 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.473707 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.473648 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:45 crc kubenswrapper[4711]: E0123 08:21:45.473842 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:21:45 crc kubenswrapper[4711]: E0123 08:21:45.473953 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:21:45 crc kubenswrapper[4711]: E0123 08:21:45.474114 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.474234 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:45 crc kubenswrapper[4711]: E0123 08:21:45.474337 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.490025 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.503219 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc8b51d9-cfd3-4da4-a51e-0f9656820731\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d539e656e760657b2820618c7f91f0d8d209f3833c28493e956b337fe15419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acad0227e70c5133aa85caa5c169ec7e78b11dc1efaa35ed5e85904f6275b9a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\
\"2026-01-23T08:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qh4pc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.516366 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.526781 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.526866 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.526878 4711 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.526920 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.526933 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:45Z","lastTransitionTime":"2026-01-23T08:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.528611 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a63253b-ad88-471e-a8c2-9ea74aa20b29\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05ca21b49a07b82a6e6767f1839594c7ead4801d43541cce740a12b11dca6f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bc864465d2dc98d051395f369a6ca2628f21b5f91ec4fa7f8432e1b28aa5e93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3bc864465d2dc98d051395f369a6ca2628f21b5f91ec4fa7f8432e1b28aa5e93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"
name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.552612 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316
def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.566422 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.579450 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.594495 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\
\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.609145 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.621660 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.630340 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.630388 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.630402 4711 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.630425 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.630445 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:45Z","lastTransitionTime":"2026-01-23T08:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.632883 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.650890 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677bbe8d1690ae3e49caee53eab8ca7d0c7cf538
13f44d3c4d96260363dbb073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677bbe8d1690ae3e49caee53eab8ca7d0c7cf53813f44d3c4d96260363dbb073\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:30Z\\\",\\\"message\\\":\\\"2025-02-23 05:35:30 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[ingress.openshift.io/canary:canary_controller] map[service.beta.openshift.io/serving-cert-secret-name:canary-serving-cert] [{apps/v1 daemonset ingress-canary f5a2759b-dc3c-483d-93f0-055bac962b12 0xc00793cad7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:8443-tcp,Protocol:TCP,Port:8443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},ServicePort{Name:8888-tcp,Protocol:TCP,Port:8888,TargetPort:{0 8888 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{ingresscanary.operator.openshift.io/daemonset-ingresscanary: canary_controller,},ClusterIP:10.217.5.34,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.34],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0123 08:21:30.021859 6669 lb_config.go:1031] Cluster endpoints for openshift-ingress-canary/ingress-canary for network=default are: map[]\\\\nI0123 08:21:30.021872 6669 services_controller.go:443] Built service open\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:21:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-jmffw_openshift-ovn-kubernetes(e16bfd0e-30fd-4fcf-865b-63400b88cff3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.659859 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.670687 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2bbf296-ae82-4cc3-b07d-bba10895a545\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:51Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-zv6rd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.688394 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\
\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.705286 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.725140 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.732815 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.732888 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:45 crc 
kubenswrapper[4711]: I0123 08:21:45.732902 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.732921 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.732935 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:45Z","lastTransitionTime":"2026-01-23T08:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.738823 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eafae4831c9cebb04dc1fe0259fa32717a44734f39508d8cd162ae212d1429fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:28Z\\\",\\\"message\\\":\\\"2026-01-23T08:20:41+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_87fc06b8-1c5e-421c-9143-8a6997e4aa37\\\\n2026-01-23T08:20:41+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_87fc06b8-1c5e-421c-9143-8a6997e4aa37 to /host/opt/cni/bin/\\\\n2026-01-23T08:20:41Z [verbose] multus-daemon started\\\\n2026-01-23T08:20:41Z [verbose] Readiness Indicator file check\\\\n2026-01-23T08:21:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:21:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.751359 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0caf5b03-2802-4381-a65b-2992843d72f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82a1773b9c61c3f43909d5cb8f620a2895f394d7f9edac8e064a4a54c75d0ca2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0701a83849ad2ad78eaf4f7b3be208d5ddd62c30472085fb71fc1229ba9dd51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d00094f52482a5875f5386a0f3d403753adc0df59abb0b433ca8b449779a594\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:45Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.835940 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.835980 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.835990 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.836009 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.836022 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:45Z","lastTransitionTime":"2026-01-23T08:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.938916 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.938973 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.938984 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.939003 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:45 crc kubenswrapper[4711]: I0123 08:21:45.939017 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:45Z","lastTransitionTime":"2026-01-23T08:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.041163 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.041201 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.041210 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.041227 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.041238 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:46Z","lastTransitionTime":"2026-01-23T08:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.144688 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.144752 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.144765 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.144786 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.144800 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:46Z","lastTransitionTime":"2026-01-23T08:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
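
The "Failed to update status for pod" records above (multus-additional-cni-plugins, multus-vpxkq, openshift-kube-scheduler-crc) all share one root cause: the network-node-identity webhook at https://127.0.0.1:9743 presents a serving certificate whose notAfter (2025-08-24T17:21:41Z) is months behind the node clock (2026-01-23T08:21:45Z), so every TLS handshake fails with "x509: certificate has expired or is not yet valid". A minimal Go sketch of the same NotBefore/NotAfter window check that crypto/x509 enforces during the handshake; the certificate path below is a hypothetical stand-in, not taken from the log:

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	// Hypothetical path; point this at the webhook's PEM serving cert.
	pemBytes, err := os.ReadFile("/tmp/webhook-serving.pem")
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil || block.Type != "CERTIFICATE" {
		log.Fatal("no CERTIFICATE block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	now := time.Now()
	// Same validity window the handshake checks; in the log, current time
	// 2026-01-23T08:21:45Z is after notAfter 2025-08-24T17:21:41Z.
	switch {
	case now.Before(cert.NotBefore):
		fmt.Printf("certificate not yet valid (notBefore %s)\n", cert.NotBefore)
	case now.After(cert.NotAfter):
		fmt.Printf("certificate expired (notAfter %s)\n", cert.NotAfter)
	default:
		fmt.Printf("certificate valid until %s\n", cert.NotAfter)
	}
}
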
Has your network provider started?"} Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.241417 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 15:00:53.719030729 +0000 UTC Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.247308 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.247360 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.247373 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.247394 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.247408 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:46Z","lastTransitionTime":"2026-01-23T08:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.350992 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.351047 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.351060 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.351082 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.351095 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:46Z","lastTransitionTime":"2026-01-23T08:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.453873 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.453957 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.453976 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.454004 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.454022 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:46Z","lastTransitionTime":"2026-01-23T08:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.557772 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.557821 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.557834 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.557853 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.557864 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:46Z","lastTransitionTime":"2026-01-23T08:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.660445 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.660527 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.660544 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.660566 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.660581 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:46Z","lastTransitionTime":"2026-01-23T08:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.763437 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.763501 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.763538 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.763558 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.763570 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:46Z","lastTransitionTime":"2026-01-23T08:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.866344 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.866396 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.866411 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.866431 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.866444 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:46Z","lastTransitionTime":"2026-01-23T08:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.969874 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.969929 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.969941 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.969962 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:46 crc kubenswrapper[4711]: I0123 08:21:46.969977 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:46Z","lastTransitionTime":"2026-01-23T08:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.072758 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.072833 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.072856 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.072886 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.072909 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:47Z","lastTransitionTime":"2026-01-23T08:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.175769 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.175811 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.175821 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.175838 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.175860 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:47Z","lastTransitionTime":"2026-01-23T08:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
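
The recurring KubeletNotReady condition in these records reports "no CNI configuration file in /etc/kubernetes/cni/net.d/": the node stays NotReady until a network configuration file appears in that directory. A simplified stand-in for that existence check follows; the real lookup lives in libcni, and this sketch only mirrors its behavior (directory path taken from the log, accepted extensions from libcni's conventions):

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

// hasCNIConfig reports whether the conf dir contains any file a CNI
// loader would accept; an empty dir yields the NetworkPluginNotReady state.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch strings.ToLower(filepath.Ext(e.Name())) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil {
		fmt.Println("cannot read conf dir:", err)
		return
	}
	if !ok {
		fmt.Println("network plugin not ready: no CNI configuration file")
	} else {
		fmt.Println("CNI configuration present")
	}
}
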
Has your network provider started?"} Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.242085 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 05:37:03.684380524 +0000 UTC Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.278434 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.278490 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.278526 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.278546 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.278566 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:47Z","lastTransitionTime":"2026-01-23T08:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.381683 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.381752 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.381778 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.381803 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.381818 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:47Z","lastTransitionTime":"2026-01-23T08:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.473089 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.473180 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.473090 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:47 crc kubenswrapper[4711]: E0123 08:21:47.473264 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.473118 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:21:47 crc kubenswrapper[4711]: E0123 08:21:47.473374 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:21:47 crc kubenswrapper[4711]: E0123 08:21:47.473492 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:21:47 crc kubenswrapper[4711]: E0123 08:21:47.473592 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.483728 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.483774 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.483788 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.483812 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.483827 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:47Z","lastTransitionTime":"2026-01-23T08:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
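
The pod_workers records above skip exactly the pods that need the cluster network (network-check-target, networking-console-plugin, network-metrics-daemon, network-check-source), while host-network pods such as the static kube-scheduler keep running. A schematic of that gate, with a plain struct standing in for the real pod type; this is an illustration of the observed behavior, not the kubelet's actual code:

package main

import (
	"errors"
	"fmt"
)

type pod struct {
	Name        string
	HostNetwork bool
}

// canSync mirrors the gate seen in the log: while the runtime reports
// NetworkReady=false, only host-network pods may proceed.
func canSync(networkReady bool, p pod) error {
	if !networkReady && !p.HostNetwork {
		return errors.New("network is not ready: container runtime network not ready")
	}
	return nil
}

func main() {
	for _, p := range []pod{
		{Name: "openshift-multus/network-metrics-daemon-zv6rd", HostNetwork: false},
		// Static control-plane pods run on the host network (assumption
		// consistent with this pod still running in the log above).
		{Name: "openshift-kube-scheduler/openshift-kube-scheduler-crc", HostNetwork: true},
	} {
		if err := canSync(false, p); err != nil {
			fmt.Printf("Error syncing pod %q, skipping: %v\n", p.Name, err)
		} else {
			fmt.Printf("pod %q can sync (host network)\n", p.Name)
		}
	}
}
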
Has your network provider started?"} Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.586790 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.586866 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.586883 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.586911 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.586927 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:47Z","lastTransitionTime":"2026-01-23T08:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.689202 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.689240 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.689249 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.689263 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.689272 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:47Z","lastTransitionTime":"2026-01-23T08:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.792391 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.792446 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.792475 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.792499 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.792550 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:47Z","lastTransitionTime":"2026-01-23T08:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.895086 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.895151 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.895166 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.895190 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.895205 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:47Z","lastTransitionTime":"2026-01-23T08:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.998249 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.998336 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.998373 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.998411 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:47 crc kubenswrapper[4711]: I0123 08:21:47.998448 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:47Z","lastTransitionTime":"2026-01-23T08:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.102087 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.102154 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.102187 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.102225 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.102248 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:48Z","lastTransitionTime":"2026-01-23T08:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.205119 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.205159 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.205170 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.205193 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.205239 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:48Z","lastTransitionTime":"2026-01-23T08:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.250780 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 05:57:26.141849354 +0000 UTC Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.309256 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.309307 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.309319 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.309345 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.309364 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:48Z","lastTransitionTime":"2026-01-23T08:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
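
The certificate_manager.go lines above print a different "rotation deadline" on each pass (2025-11-28, 2025-12-18, 2026-01-05, and 2025-11-23 below), each already in the past relative to the node clock of 2026-01-23, which is why rotation keeps firing. This matches client-go's approach of re-drawing a jittered deadline in the final stretch of the certificate's lifetime; the sketch below is a simplified re-implementation for illustration, not the upstream code, and the notBefore value is an assumption since the log only shows the expiration:

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline draws a deadline uniformly in the final 30% of the
// certificate's lifetime, so each call (each log pass) prints a new value.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := float64(notAfter.Sub(notBefore))
	return notBefore.Add(time.Duration(total * (0.7 + 0.3*rand.Float64())))
}

func main() {
	// Expiration copied from the log; a one-year lifetime is assumed.
	notAfter, err := time.Parse(time.RFC3339, "2026-02-24T05:53:03Z")
	if err != nil {
		panic(err)
	}
	notBefore := notAfter.AddDate(-1, 0, 0)
	for i := 0; i < 4; i++ {
		fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
	}
}
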
Has your network provider started?"} Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.413614 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.413681 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.413693 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.413714 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.413732 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:48Z","lastTransitionTime":"2026-01-23T08:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.516695 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.516758 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.516773 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.516795 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.516807 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:48Z","lastTransitionTime":"2026-01-23T08:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.619883 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.619932 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.619944 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.619962 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.619974 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:48Z","lastTransitionTime":"2026-01-23T08:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.723069 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.723120 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.723133 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.723154 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.723165 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:48Z","lastTransitionTime":"2026-01-23T08:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.825976 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.826040 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.826096 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.826126 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.826144 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:48Z","lastTransitionTime":"2026-01-23T08:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.928596 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.928642 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.928653 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.928672 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:48 crc kubenswrapper[4711]: I0123 08:21:48.928684 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:48Z","lastTransitionTime":"2026-01-23T08:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.031804 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.031847 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.031858 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.031876 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.031887 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:49Z","lastTransitionTime":"2026-01-23T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.134660 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.134694 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.134703 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.134719 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.134730 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:49Z","lastTransitionTime":"2026-01-23T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.237046 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.237091 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.237100 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.237121 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.237135 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:49Z","lastTransitionTime":"2026-01-23T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.251478 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 04:29:59.913998342 +0000 UTC Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.340177 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.340225 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.340236 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.340255 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.340268 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:49Z","lastTransitionTime":"2026-01-23T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.442860 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.442925 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.442953 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.442977 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.442991 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:49Z","lastTransitionTime":"2026-01-23T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.473382 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.473431 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.473472 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.473386 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:49 crc kubenswrapper[4711]: E0123 08:21:49.473604 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:21:49 crc kubenswrapper[4711]: E0123 08:21:49.473925 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:21:49 crc kubenswrapper[4711]: E0123 08:21:49.473996 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:21:49 crc kubenswrapper[4711]: E0123 08:21:49.474053 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.545385 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.545440 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.545450 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.545468 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.545481 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:49Z","lastTransitionTime":"2026-01-23T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.648158 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.648203 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.648213 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.648227 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.648238 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:49Z","lastTransitionTime":"2026-01-23T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.750569 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.750636 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.750649 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.750665 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.750676 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:49Z","lastTransitionTime":"2026-01-23T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.852799 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.852852 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.852864 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.852884 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.852899 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:49Z","lastTransitionTime":"2026-01-23T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.955377 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.955441 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.955454 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.955477 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:49 crc kubenswrapper[4711]: I0123 08:21:49.955492 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:49Z","lastTransitionTime":"2026-01-23T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.059187 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.059236 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.059246 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.059267 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.059281 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:50Z","lastTransitionTime":"2026-01-23T08:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.163467 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.163571 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.163600 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.163632 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.163654 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:50Z","lastTransitionTime":"2026-01-23T08:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.252639 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 15:34:24.755102098 +0000 UTC Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.267629 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.267677 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.267691 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.267710 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.267722 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:50Z","lastTransitionTime":"2026-01-23T08:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.370864 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.370920 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.370932 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.370953 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.370967 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:50Z","lastTransitionTime":"2026-01-23T08:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.474132 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.474192 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.474205 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.474228 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.474241 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:50Z","lastTransitionTime":"2026-01-23T08:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.578008 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.578167 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.578203 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.578244 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.578269 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:50Z","lastTransitionTime":"2026-01-23T08:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.681255 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.681303 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.681315 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.681333 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.681345 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:50Z","lastTransitionTime":"2026-01-23T08:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.784006 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.784063 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.784078 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.784096 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.784108 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:50Z","lastTransitionTime":"2026-01-23T08:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.888014 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.888104 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.888130 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.888168 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.888189 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:50Z","lastTransitionTime":"2026-01-23T08:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.991501 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.991609 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.991628 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.991660 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:50 crc kubenswrapper[4711]: I0123 08:21:50.991683 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:50Z","lastTransitionTime":"2026-01-23T08:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.095470 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.095593 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.095613 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.095642 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.095662 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:51Z","lastTransitionTime":"2026-01-23T08:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.197967 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.198026 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.198043 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.198067 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.198086 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:51Z","lastTransitionTime":"2026-01-23T08:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.253144 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 05:48:13.804444398 +0000 UTC Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.302066 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.302132 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.302146 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.302166 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.302182 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:51Z","lastTransitionTime":"2026-01-23T08:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.411913 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.412020 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.412039 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.412067 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.412085 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:51Z","lastTransitionTime":"2026-01-23T08:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.473666 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.473813 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.473818 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:51 crc kubenswrapper[4711]: E0123 08:21:51.473942 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.474266 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:51 crc kubenswrapper[4711]: E0123 08:21:51.474281 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:21:51 crc kubenswrapper[4711]: E0123 08:21:51.474363 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:21:51 crc kubenswrapper[4711]: E0123 08:21:51.474699 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.515433 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.515541 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.515560 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.515587 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.515608 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:51Z","lastTransitionTime":"2026-01-23T08:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.618674 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.618791 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.618818 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.618850 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.618884 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:51Z","lastTransitionTime":"2026-01-23T08:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.721384 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.721443 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.721455 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.721476 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.721495 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:51Z","lastTransitionTime":"2026-01-23T08:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.824668 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.824720 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.824735 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.824757 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.824771 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:51Z","lastTransitionTime":"2026-01-23T08:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.931209 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.931274 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.931423 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.931447 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:51 crc kubenswrapper[4711]: I0123 08:21:51.931466 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:51Z","lastTransitionTime":"2026-01-23T08:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.034200 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.034235 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.034246 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.034267 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.034284 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:52Z","lastTransitionTime":"2026-01-23T08:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.136946 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.136987 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.137001 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.137021 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.137032 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:52Z","lastTransitionTime":"2026-01-23T08:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.239822 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.239863 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.239875 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.239895 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.239908 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:52Z","lastTransitionTime":"2026-01-23T08:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.254264 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 12:14:11.574936872 +0000 UTC Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.343764 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.343821 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.343830 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.343848 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.343861 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:52Z","lastTransitionTime":"2026-01-23T08:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.446869 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.446939 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.446949 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.446970 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.446983 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:52Z","lastTransitionTime":"2026-01-23T08:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.549840 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.549899 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.549910 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.549930 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.549941 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:52Z","lastTransitionTime":"2026-01-23T08:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.652590 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.652641 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.652654 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.652673 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.652685 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:52Z","lastTransitionTime":"2026-01-23T08:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.755824 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.755871 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.755886 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.755904 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.755915 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:52Z","lastTransitionTime":"2026-01-23T08:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.858883 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.858948 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.858962 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.858980 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.858994 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:52Z","lastTransitionTime":"2026-01-23T08:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.961707 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.961812 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.961824 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.961854 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:52 crc kubenswrapper[4711]: I0123 08:21:52.961865 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:52Z","lastTransitionTime":"2026-01-23T08:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.065773 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.065852 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.065868 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.065892 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.065908 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:53Z","lastTransitionTime":"2026-01-23T08:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.169358 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.169421 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.169434 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.169459 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.169473 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:53Z","lastTransitionTime":"2026-01-23T08:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.255399 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 13:34:59.691858991 +0000 UTC Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.272185 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.272254 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.272270 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.272296 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.272318 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:53Z","lastTransitionTime":"2026-01-23T08:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.376189 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.376255 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.376270 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.376293 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.376310 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:53Z","lastTransitionTime":"2026-01-23T08:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.473593 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.473666 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.473615 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:53 crc kubenswrapper[4711]: E0123 08:21:53.473876 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:21:53 crc kubenswrapper[4711]: E0123 08:21:53.473984 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.474056 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:53 crc kubenswrapper[4711]: E0123 08:21:53.474194 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:21:53 crc kubenswrapper[4711]: E0123 08:21:53.474357 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.479657 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.479702 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.479713 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.479764 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.479778 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:53Z","lastTransitionTime":"2026-01-23T08:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.583172 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.583230 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.583239 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.583258 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.583269 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:53Z","lastTransitionTime":"2026-01-23T08:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.686866 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.686936 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.686956 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.686985 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.687006 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:53Z","lastTransitionTime":"2026-01-23T08:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.790275 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.790323 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.790333 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.790348 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.790358 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:53Z","lastTransitionTime":"2026-01-23T08:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.892933 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.892990 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.893007 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.893037 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.893056 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:53Z","lastTransitionTime":"2026-01-23T08:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.995978 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.996091 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.996118 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.996149 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:53 crc kubenswrapper[4711]: I0123 08:21:53.996173 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:53Z","lastTransitionTime":"2026-01-23T08:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.099413 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.099479 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.099492 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.099530 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.099549 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:54Z","lastTransitionTime":"2026-01-23T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.203303 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.203370 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.203384 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.203403 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.203416 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:54Z","lastTransitionTime":"2026-01-23T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.218404 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.218483 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.218532 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.218561 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.218588 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:54Z","lastTransitionTime":"2026-01-23T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:54 crc kubenswrapper[4711]: E0123 08:21:54.245722 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:54Z is after 
2025-08-24T17:21:41Z" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.251159 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.251441 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.251834 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.252053 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.252253 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:54Z","lastTransitionTime":"2026-01-23T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.255599 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 00:02:09.802835767 +0000 UTC Jan 23 08:21:54 crc kubenswrapper[4711]: E0123 08:21:54.274336 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:54Z is after 
2025-08-24T17:21:41Z" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.280234 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.280289 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.280309 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.280335 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.280354 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:54Z","lastTransitionTime":"2026-01-23T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:54 crc kubenswrapper[4711]: E0123 08:21:54.296401 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:54Z is after 
2025-08-24T17:21:41Z" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.300428 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.300604 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.300706 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.300789 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.300856 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:54Z","lastTransitionTime":"2026-01-23T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:54 crc kubenswrapper[4711]: E0123 08:21:54.315651 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:54Z is after 
2025-08-24T17:21:41Z"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.320128 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.320282 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.320303 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.320332 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.320351 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:54Z","lastTransitionTime":"2026-01-23T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:54 crc kubenswrapper[4711]: E0123 08:21:54.336448 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:54Z is after 
2025-08-24T17:21:41Z"
Jan 23 08:21:54 crc kubenswrapper[4711]: E0123 08:21:54.336736 4711 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.338720 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.338781 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.338804 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.338835 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.338858 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:54Z","lastTransitionTime":"2026-01-23T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.443051 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.443133 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.443157 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.443188 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.443214 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:54Z","lastTransitionTime":"2026-01-23T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.546771 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.546853 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.546874 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.546903 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.546924 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:54Z","lastTransitionTime":"2026-01-23T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.650631 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.650684 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.650698 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.650716 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.650728 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:54Z","lastTransitionTime":"2026-01-23T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.753049 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.753108 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.753124 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.753156 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.753174 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:54Z","lastTransitionTime":"2026-01-23T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.857099 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.857158 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.857171 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.857193 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.857208 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:54Z","lastTransitionTime":"2026-01-23T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.960570 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.960627 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.960637 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.960661 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:54 crc kubenswrapper[4711]: I0123 08:21:54.960675 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:54Z","lastTransitionTime":"2026-01-23T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.063423 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.063495 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.063595 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.063627 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.063644 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:55Z","lastTransitionTime":"2026-01-23T08:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.166644 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.166704 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.166720 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.166740 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.166754 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:55Z","lastTransitionTime":"2026-01-23T08:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.256731 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 14:21:13.92939408 +0000 UTC
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.270062 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.270124 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.270141 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.270166 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.270185 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:55Z","lastTransitionTime":"2026-01-23T08:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.373244 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.373306 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.373323 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.373351 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.373370 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:55Z","lastTransitionTime":"2026-01-23T08:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.473046 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.473098 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 23 08:21:55 crc kubenswrapper[4711]: E0123 08:21:55.473227 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.473289 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 23 08:21:55 crc kubenswrapper[4711]: E0123 08:21:55.473463 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 23 08:21:55 crc kubenswrapper[4711]: E0123 08:21:55.473644 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.473773 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 23 08:21:55 crc kubenswrapper[4711]: E0123 08:21:55.473904 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.475794 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.475874 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.475899 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.475926 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.475953 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:55Z","lastTransitionTime":"2026-01-23T08:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.492350 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc8b51d9-cfd3-4da4-a51e-0f9656820731\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22d539e656e760657b2820618c7f91f0d8d209f3833c28493e956b337fe15419\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://acad0227e70c5133aa85caa5c169ec7e78b11dc1efaa35ed5e85904f6275b9a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pkk77\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qh4pc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:55Z is after 2025-08-24T17:21:41Z" Jan 23 
08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.510483 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gpch6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"23cac305-d4a6-4543-9585-c9d46409e12b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fde1086c15fdad0c2809f462eb84797bcd06083ee53d54090832c1bc9ded4b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-99g7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gpch6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.521908 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3846d4e0-cfda-4e0b-8747-85267de12736\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a4e3811169c966d4a0f2fe413a2f6612d3f83bd85d3e69698750ad4a60bd25ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhnmh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2t9r8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.546011 4711 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"411b3595-dd4a-4ea6-a62d-2e3753783e6c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6a9bf944d11e9e60fe73c1edaa31da90b2268ec3e204717e0bc4e6784497e1a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://930b6e675553f3b15d255571644c60321ac7da4d11113abc02ab20ebaedb0e1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b681305915b6e1d9d1e75d8316def2d6c6e9a3d153303232cd611615aeac28a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://16388bb30a8ac13e945c677008cd5fd5a931963219296a5ee395fca61cfc0f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d2b98bc0749ef9dbf8fda209774ca0242be39f7e23e1d61c8ec88128addbea0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a11725e38a7949a746cc245edd22f527738b02edee88e7e43a5ccd9dcf74575\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0a5908e13d50b33e8519f74baae8557153634f637dc8458035bbbf12366528a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1bb7def49ce9b4872f7ec34c9347cdee8b187bced78d0eef5fa8e41857e0f3dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.564006 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://249cb4ec0122793c2478fa85bf28ee1363a9447e4efe608e7225df01b8e9f8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.577750 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.579307 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.579361 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.579374 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.579394 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.579407 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:55Z","lastTransitionTime":"2026-01-23T08:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.591025 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"125f4809-5a5d-4b9f-b97a-168a267c9537\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://549bb1207b8a5b8824d4d2ab4b89d035944f7f6bc183c9a8caa8071fc1cba429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1ff6b63a8ad73b123bad263546515261673b4a3ba04f7990f7a31700e5d35fc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded009a9b0dc17061578a35f2aab6e39b884397eb6619be77bcb47c8a6a0d444\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.601604 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a63253b-ad88-471e-a8c2-9ea74aa20b29\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://05ca21b49a07b82a6e6767f1839594c7ead4801d43541cce740a12b11dca6f3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bc864465d2dc98d051395f369a6ca2628f21b5f91ec4fa7f8432e1b28aa5e93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3bc864465d2dc98d051395f369a6ca2628f21b5f91ec4fa7f8432e1b28aa5e93\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.615942 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4446525ef10f4a67906f2169d71815ddf3e30ac2e73303cc63fc01a7743ff31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a85e637a0a7edc22a7b9007bbde9850a8266aecef9ef09065ce9ba01ca672f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.629706 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.650336 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://677bbe8d1690ae3e49caee53eab8ca7d0c7cf538
13f44d3c4d96260363dbb073\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://677bbe8d1690ae3e49caee53eab8ca7d0c7cf53813f44d3c4d96260363dbb073\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:30Z\\\",\\\"message\\\":\\\"2025-02-23 05:35:30 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[ingress.openshift.io/canary:canary_controller] map[service.beta.openshift.io/serving-cert-secret-name:canary-serving-cert] [{apps/v1 daemonset ingress-canary f5a2759b-dc3c-483d-93f0-055bac962b12 0xc00793cad7 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:8443-tcp,Protocol:TCP,Port:8443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},ServicePort{Name:8888-tcp,Protocol:TCP,Port:8888,TargetPort:{0 8888 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{ingresscanary.operator.openshift.io/daemonset-ingresscanary: canary_controller,},ClusterIP:10.217.5.34,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.34],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0123 08:21:30.021859 6669 lb_config.go:1031] Cluster endpoints for openshift-ingress-canary/ingress-canary for network=default are: map[]\\\\nI0123 08:21:30.021872 6669 services_controller.go:443] Built service open\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:21:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-jmffw_openshift-ovn-kubernetes(e16bfd0e-30fd-4fcf-865b-63400b88cff3)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mppnf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jmffw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.660862 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bkn9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80227686-5007-41d4-8d57-bcedb7564f78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31878cc5ae030d2a8f6d2a4f915e1d37b3661ab07fad492c8a16f551c8312933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-whkp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bkn9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.668272 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs\") pod \"network-metrics-daemon-zv6rd\" (UID: \"f2bbf296-ae82-4cc3-b07d-bba10895a545\") " pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:55 crc kubenswrapper[4711]: E0123 08:21:55.668441 4711 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 23 08:21:55 crc kubenswrapper[4711]: E0123 08:21:55.668591 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs podName:f2bbf296-ae82-4cc3-b07d-bba10895a545 nodeName:}" failed. No retries permitted until 2026-01-23 08:22:59.668567575 +0000 UTC m=+165.241523943 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs") pod "network-metrics-daemon-zv6rd" (UID: "f2bbf296-ae82-4cc3-b07d-bba10895a545") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.670909 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2bbf296-ae82-4cc3-b07d-bba10895a545\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j5x4v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zv6rd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.682470 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.682528 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.682537 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.682555 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.682567 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:55Z","lastTransitionTime":"2026-01-23T08:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.685137 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f842319a-e130-4f1e-8aeb-d92f8ad00290\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0123 08:20:29.893909 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0123 08:20:29.895564 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3174199292/tls.crt::/tmp/serving-cert-3174199292/tls.key\\\\\\\"\\\\nI0123 08:20:35.606297 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0123 08:20:35.612573 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0123 08:20:35.612599 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0123 08:20:35.612645 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0123 08:20:35.612651 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0123 08:20:35.618000 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0123 08:20:35.618024 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618029 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0123 08:20:35.618034 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0123 08:20:35.618037 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0123 08:20:35.618041 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0123 08:20:35.618044 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0123 08:20:35.618182 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0123 08:20:35.621456 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.701450 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.718877 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-955cc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90368f73-4e1c-477c-a507-30d6108ac6a1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://912b7708346dae1c3f98b725f956912ba464744c3958f838a867f557311e0254\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8ed445a75dc404ec40c68294904317506d616e0e733a59218372a3360a74409\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fcd1d4a28e5568fecd675bcc422dc26fdec29b356282c0b06fd9ee165bd40f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49fb7db2037f87a1bf6c093633176f3fc848d3842beabb7586e98aec1139f68a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71c0afd08ea8a38ae405299b1d1b70e6190bb62ca83170a7d0b979461eaea37b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://89f651ea8ffe2f2d6c1861279275d1ab75ab29f11a870e02e6877115d5b47c80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://008d29d49f7c0843ec29de10119b5bb8873c0ff2667e09f5103c060991ea69ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsn7w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-955cc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.733304 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vpxkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8cc803a0-2626-4444-b4b2-8e9567277d44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eafae4831c9cebb04dc1fe0259fa32717a44734f39508d8cd162ae212d1429fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-23T08:21:28Z\\\",\\\"message\\\":\\\"2026-01-23T08:20:41+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_87fc06b8-1c5e-421c-9143-8a6997e4aa37\\\\n2026-01-23T08:20:41+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_87fc06b8-1c5e-421c-9143-8a6997e4aa37 to /host/opt/cni/bin/\\\\n2026-01-23T08:20:41Z [verbose] multus-daemon started\\\\n2026-01-23T08:20:41Z [verbose] Readiness Indicator file check\\\\n2026-01-23T08:21:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:38Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:21:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pk2h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vpxkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.746704 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0caf5b03-2802-4381-a65b-2992843d72f5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82a1773b9c61c3f43909d5cb8f620a2895f394d7f9edac8e064a4a54c75d0ca2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0701a83849ad2ad78eaf4f7b3be208d5ddd62c30472085fb71fc1229ba9dd51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d00094f52482a5875f5386a0f3d403753adc0df59abb0b433ca8b449779a594\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78e6f5722fb27f8b13e225d318dd1ab84c23315f5f58a3ac1970ad9b0ce26fac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-23T08:20:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-23T08:20:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-23T08:20:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:55Z is after 2025-08-24T17:21:41Z" Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.760447 4711 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-23T08:20:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cf36ee239fe7a20e692feb7a4e3840e623bfb49d90e9bb8f95d8efad9301c70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-23T08:20:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:21:55Z is after 2025-08-24T17:21:41Z" Jan 23 
08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.784411 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.784481 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.784496 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.784553 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.784569 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:55Z","lastTransitionTime":"2026-01-23T08:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.886832 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.886885 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.886909 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.886935 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.886952 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:55Z","lastTransitionTime":"2026-01-23T08:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.989632 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.989688 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.989700 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.989720 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:55 crc kubenswrapper[4711]: I0123 08:21:55.989733 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:55Z","lastTransitionTime":"2026-01-23T08:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.091965 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.092003 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.092012 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.092028 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.092038 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:56Z","lastTransitionTime":"2026-01-23T08:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.194904 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.195057 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.195071 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.195093 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.195106 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:56Z","lastTransitionTime":"2026-01-23T08:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.257101 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 00:23:50.802409704 +0000 UTC
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.298060 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.298148 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.298165 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.298193 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.298212 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:56Z","lastTransitionTime":"2026-01-23T08:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.402056 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.402114 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.402133 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.402160 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.402183 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:56Z","lastTransitionTime":"2026-01-23T08:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.506065 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.506130 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.506148 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.506175 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.506194 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:56Z","lastTransitionTime":"2026-01-23T08:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.618112 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.618229 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.618252 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.618319 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.618339 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:56Z","lastTransitionTime":"2026-01-23T08:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.722634 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.722709 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.722728 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.722759 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.722778 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:56Z","lastTransitionTime":"2026-01-23T08:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.825486 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.825530 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.825538 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.825574 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.825586 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:56Z","lastTransitionTime":"2026-01-23T08:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.928049 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.928101 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.928111 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.928130 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:56 crc kubenswrapper[4711]: I0123 08:21:56.928141 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:56Z","lastTransitionTime":"2026-01-23T08:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.031808 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.031889 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.031907 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.031943 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.031962 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:57Z","lastTransitionTime":"2026-01-23T08:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.135331 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.135400 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.135417 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.135439 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.135452 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:57Z","lastTransitionTime":"2026-01-23T08:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.238604 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.238661 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.238678 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.238703 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.238729 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:57Z","lastTransitionTime":"2026-01-23T08:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.257750 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 10:37:39.331084932 +0000 UTC
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.341549 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.341596 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.341612 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.341635 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.341650 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:57Z","lastTransitionTime":"2026-01-23T08:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.444779 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.444831 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.444839 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.444856 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.444865 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:57Z","lastTransitionTime":"2026-01-23T08:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.473184 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 23 08:21:57 crc kubenswrapper[4711]: E0123 08:21:57.474153 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.473269 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 23 08:21:57 crc kubenswrapper[4711]: E0123 08:21:57.474257 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.473253 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:21:57 crc kubenswrapper[4711]: E0123 08:21:57.474349 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.473494 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 23 08:21:57 crc kubenswrapper[4711]: E0123 08:21:57.474481 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.549073 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.549124 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.549141 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.549165 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.549186 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:57Z","lastTransitionTime":"2026-01-23T08:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.652791 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.652835 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.652847 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.652867 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.652879 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:57Z","lastTransitionTime":"2026-01-23T08:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.755767 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.755845 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.755867 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.755900 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.755938 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:57Z","lastTransitionTime":"2026-01-23T08:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.858829 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.858883 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.858895 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.858912 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.858925 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:57Z","lastTransitionTime":"2026-01-23T08:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.962197 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.962662 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.962749 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.963197 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:57 crc kubenswrapper[4711]: I0123 08:21:57.963458 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:57Z","lastTransitionTime":"2026-01-23T08:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.066625 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.066671 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.066683 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.066703 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.066715 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:58Z","lastTransitionTime":"2026-01-23T08:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.169392 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.169448 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.169460 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.169482 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.169495 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:58Z","lastTransitionTime":"2026-01-23T08:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.258983 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 14:45:48.478754306 +0000 UTC
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.272062 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.272105 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.272119 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.272138 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.272154 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:58Z","lastTransitionTime":"2026-01-23T08:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.376278 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.376344 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.376370 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.376399 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.376420 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:58Z","lastTransitionTime":"2026-01-23T08:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.480652 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.480737 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.480759 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.480815 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.480841 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:58Z","lastTransitionTime":"2026-01-23T08:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.583923 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.583970 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.583983 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.584001 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.584014 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:58Z","lastTransitionTime":"2026-01-23T08:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.686681 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.686747 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.686765 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.686791 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.686809 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:58Z","lastTransitionTime":"2026-01-23T08:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.789419 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.789878 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.790047 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.790202 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.790345 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:58Z","lastTransitionTime":"2026-01-23T08:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.893417 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.893467 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.893478 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.893522 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.893535 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:58Z","lastTransitionTime":"2026-01-23T08:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.996917 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.996995 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.997011 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.997033 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:58 crc kubenswrapper[4711]: I0123 08:21:58.997058 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:58Z","lastTransitionTime":"2026-01-23T08:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.099749 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.100246 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.100324 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.100402 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.100523 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:59Z","lastTransitionTime":"2026-01-23T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.202923 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.202965 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.202978 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.202995 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.203006 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:59Z","lastTransitionTime":"2026-01-23T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.259114 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 03:04:40.887968108 +0000 UTC Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.305479 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.305600 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.305624 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.305657 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.305682 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:59Z","lastTransitionTime":"2026-01-23T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.408559 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.408607 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.408617 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.408631 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.408642 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:59Z","lastTransitionTime":"2026-01-23T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.473485 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.473485 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:21:59 crc kubenswrapper[4711]: E0123 08:21:59.473666 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.473710 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.473797 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:21:59 crc kubenswrapper[4711]: E0123 08:21:59.473749 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:21:59 crc kubenswrapper[4711]: E0123 08:21:59.473894 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:21:59 crc kubenswrapper[4711]: E0123 08:21:59.473979 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.474715 4711 scope.go:117] "RemoveContainer" containerID="677bbe8d1690ae3e49caee53eab8ca7d0c7cf53813f44d3c4d96260363dbb073" Jan 23 08:21:59 crc kubenswrapper[4711]: E0123 08:21:59.474882 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-jmffw_openshift-ovn-kubernetes(e16bfd0e-30fd-4fcf-865b-63400b88cff3)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.510939 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.510986 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.510999 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.511021 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.511035 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:59Z","lastTransitionTime":"2026-01-23T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.613642 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.613692 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.613705 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.613724 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.613738 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:59Z","lastTransitionTime":"2026-01-23T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.716831 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.716891 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.716903 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.716922 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.716935 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:59Z","lastTransitionTime":"2026-01-23T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.820155 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.820205 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.820220 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.820242 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.820256 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:59Z","lastTransitionTime":"2026-01-23T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.922559 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.922598 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.922607 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.922622 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:21:59 crc kubenswrapper[4711]: I0123 08:21:59.922631 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:21:59Z","lastTransitionTime":"2026-01-23T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.025230 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.025266 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.025305 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.025323 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.025334 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:00Z","lastTransitionTime":"2026-01-23T08:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.127705 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.127748 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.127758 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.127780 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.127792 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:00Z","lastTransitionTime":"2026-01-23T08:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.230627 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.230685 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.230698 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.230718 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.230732 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:00Z","lastTransitionTime":"2026-01-23T08:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.259355 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 03:06:18.652392453 +0000 UTC Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.333434 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.333493 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.333529 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.333552 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.333566 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:00Z","lastTransitionTime":"2026-01-23T08:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.437033 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.437100 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.437118 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.437143 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.437159 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:00Z","lastTransitionTime":"2026-01-23T08:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.540436 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.540559 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.540570 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.540586 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.540596 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:00Z","lastTransitionTime":"2026-01-23T08:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.643237 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.643294 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.643317 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.643341 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.643356 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:00Z","lastTransitionTime":"2026-01-23T08:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.746242 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.746291 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.746301 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.746322 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.746341 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:00Z","lastTransitionTime":"2026-01-23T08:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.848844 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.848891 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.848901 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.848918 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.848929 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:00Z","lastTransitionTime":"2026-01-23T08:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.951682 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.951747 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.951757 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.951774 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:00 crc kubenswrapper[4711]: I0123 08:22:00.951785 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:00Z","lastTransitionTime":"2026-01-23T08:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.054449 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.054491 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.054523 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.054549 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.054562 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:01Z","lastTransitionTime":"2026-01-23T08:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.157722 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.157765 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.157774 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.157791 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.157802 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:01Z","lastTransitionTime":"2026-01-23T08:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.259555 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 12:48:30.268542439 +0000 UTC Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.260729 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.260768 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.260779 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.260795 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.260805 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:01Z","lastTransitionTime":"2026-01-23T08:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.364461 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.364593 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.364613 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.364645 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.364664 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:01Z","lastTransitionTime":"2026-01-23T08:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.467646 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.467692 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.467704 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.467722 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.467738 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:01Z","lastTransitionTime":"2026-01-23T08:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.472789 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.472846 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.472872 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:22:01 crc kubenswrapper[4711]: E0123 08:22:01.472972 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.473810 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:22:01 crc kubenswrapper[4711]: E0123 08:22:01.473908 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:22:01 crc kubenswrapper[4711]: E0123 08:22:01.474067 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:22:01 crc kubenswrapper[4711]: E0123 08:22:01.474962 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.570082 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.570144 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.570160 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.570179 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.570191 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:01Z","lastTransitionTime":"2026-01-23T08:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.673259 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.673335 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.673359 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.673391 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.673414 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:01Z","lastTransitionTime":"2026-01-23T08:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.776873 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.776938 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.776949 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.776968 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.776981 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:01Z","lastTransitionTime":"2026-01-23T08:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.879871 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.879935 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.879951 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.879977 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.879997 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:01Z","lastTransitionTime":"2026-01-23T08:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.983102 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.983170 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.983187 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.983211 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:01 crc kubenswrapper[4711]: I0123 08:22:01.983231 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:01Z","lastTransitionTime":"2026-01-23T08:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.086887 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.086952 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.086969 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.086993 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.087009 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:02Z","lastTransitionTime":"2026-01-23T08:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.189637 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.189671 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.189680 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.189694 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.189704 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:02Z","lastTransitionTime":"2026-01-23T08:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.260383 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 06:42:45.657313269 +0000 UTC Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.293193 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.293257 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.293274 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.293301 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.293315 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:02Z","lastTransitionTime":"2026-01-23T08:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.395659 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.395712 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.395726 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.395742 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.395753 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:02Z","lastTransitionTime":"2026-01-23T08:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.498992 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.499055 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.499068 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.499095 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.499111 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:02Z","lastTransitionTime":"2026-01-23T08:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.604645 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.604709 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.604720 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.604743 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.604760 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:02Z","lastTransitionTime":"2026-01-23T08:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.708682 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.708761 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.708775 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.708797 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.708810 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:02Z","lastTransitionTime":"2026-01-23T08:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.812299 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.812354 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.812369 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.812385 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.812396 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:02Z","lastTransitionTime":"2026-01-23T08:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.915768 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.915837 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.915852 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.915876 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:02 crc kubenswrapper[4711]: I0123 08:22:02.915893 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:02Z","lastTransitionTime":"2026-01-23T08:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.019077 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.019140 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.019153 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.019177 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.019192 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:03Z","lastTransitionTime":"2026-01-23T08:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.121679 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.121736 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.121748 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.121771 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.121786 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:03Z","lastTransitionTime":"2026-01-23T08:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.224578 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.224645 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.224658 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.224678 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.224697 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:03Z","lastTransitionTime":"2026-01-23T08:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.261559 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 01:35:43.51105121 +0000 UTC Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.328006 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.328062 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.328074 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.328095 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.328110 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:03Z","lastTransitionTime":"2026-01-23T08:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.431756 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.431805 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.431816 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.431834 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.431848 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:03Z","lastTransitionTime":"2026-01-23T08:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.473668 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.473730 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.473828 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:22:03 crc kubenswrapper[4711]: E0123 08:22:03.473892 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.473679 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:22:03 crc kubenswrapper[4711]: E0123 08:22:03.474051 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:22:03 crc kubenswrapper[4711]: E0123 08:22:03.474156 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:22:03 crc kubenswrapper[4711]: E0123 08:22:03.474237 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.534428 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.534474 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.534488 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.534533 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.534551 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:03Z","lastTransitionTime":"2026-01-23T08:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.637365 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.637407 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.637419 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.637439 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.637450 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:03Z","lastTransitionTime":"2026-01-23T08:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.741321 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.741390 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.741428 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.741465 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.741489 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:03Z","lastTransitionTime":"2026-01-23T08:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.844805 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.844869 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.844881 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.844902 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.844917 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:03Z","lastTransitionTime":"2026-01-23T08:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.948831 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.948894 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.948908 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.948932 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:03 crc kubenswrapper[4711]: I0123 08:22:03.948949 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:03Z","lastTransitionTime":"2026-01-23T08:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.051739 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.051825 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.051848 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.051879 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.051902 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:04Z","lastTransitionTime":"2026-01-23T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.154700 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.154788 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.154812 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.154844 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.154866 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:04Z","lastTransitionTime":"2026-01-23T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.258036 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.258083 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.258092 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.258110 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.258121 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:04Z","lastTransitionTime":"2026-01-23T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.262624 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 04:58:48.201841818 +0000 UTC Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.366502 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.366574 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.366586 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.366605 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.366617 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:04Z","lastTransitionTime":"2026-01-23T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.469929 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.469989 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.470001 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.470020 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.470030 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:04Z","lastTransitionTime":"2026-01-23T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.475548 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.475589 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.475647 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.475662 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.475674 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:04Z","lastTransitionTime":"2026-01-23T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:04 crc kubenswrapper[4711]: E0123 08:22:04.489386 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:22:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.494991 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.495069 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.495087 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.495115 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.495135 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:04Z","lastTransitionTime":"2026-01-23T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:04 crc kubenswrapper[4711]: E0123 08:22:04.509448 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:22:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.513038 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.513083 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.513094 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.513117 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.513134 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:04Z","lastTransitionTime":"2026-01-23T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:04 crc kubenswrapper[4711]: E0123 08:22:04.528029 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:22:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.533108 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.533174 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.533185 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.533207 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.533219 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:04Z","lastTransitionTime":"2026-01-23T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:04 crc kubenswrapper[4711]: E0123 08:22:04.549046 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:22:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.552883 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.552955 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.552972 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.552999 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.553017 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:04Z","lastTransitionTime":"2026-01-23T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:04 crc kubenswrapper[4711]: E0123 08:22:04.564680 4711 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-23T08:22:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"27dc1c1b-2120-44f4-9f06-7adb0d52081c\\\",\\\"systemUUID\\\":\\\"48b4ba17-ccdd-4448-a1d6-e418ae5877df\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-23T08:22:04Z is after 2025-08-24T17:21:41Z" Jan 23 08:22:04 crc kubenswrapper[4711]: E0123 08:22:04.564913 4711 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.572992 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.573038 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.573047 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.573066 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.573078 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:04Z","lastTransitionTime":"2026-01-23T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.676156 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.676219 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.676233 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.676253 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.676270 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:04Z","lastTransitionTime":"2026-01-23T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.778412 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.778478 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.778498 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.778552 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.778570 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:04Z","lastTransitionTime":"2026-01-23T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.882078 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.882166 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.882180 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.882227 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.882244 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:04Z","lastTransitionTime":"2026-01-23T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.985713 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.985817 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.985838 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.985859 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:04 crc kubenswrapper[4711]: I0123 08:22:04.985874 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:04Z","lastTransitionTime":"2026-01-23T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.088920 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.089006 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.089025 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.089057 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.089077 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:05Z","lastTransitionTime":"2026-01-23T08:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.193113 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.193193 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.193220 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.193258 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.193291 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:05Z","lastTransitionTime":"2026-01-23T08:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.263591 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 03:21:08.178471081 +0000 UTC Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.297061 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.297127 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.297141 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.297165 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.297180 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:05Z","lastTransitionTime":"2026-01-23T08:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.399857 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.399907 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.399917 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.399935 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.399946 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:05Z","lastTransitionTime":"2026-01-23T08:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.473603 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:22:05 crc kubenswrapper[4711]: E0123 08:22:05.473830 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.473859 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.473951 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:22:05 crc kubenswrapper[4711]: E0123 08:22:05.474106 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.474150 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:22:05 crc kubenswrapper[4711]: E0123 08:22:05.474288 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:22:05 crc kubenswrapper[4711]: E0123 08:22:05.474413 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.500041 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-gpch6" podStartSLOduration=89.500020573 podStartE2EDuration="1m29.500020573s" podCreationTimestamp="2026-01-23 08:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:05.49949436 +0000 UTC m=+111.072450748" watchObservedRunningTime="2026-01-23 08:22:05.500020573 +0000 UTC m=+111.072976941" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.502738 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.502800 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.502824 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.502855 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.502877 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:05Z","lastTransitionTime":"2026-01-23T08:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.514834 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podStartSLOduration=89.514692379 podStartE2EDuration="1m29.514692379s" podCreationTimestamp="2026-01-23 08:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:05.513756055 +0000 UTC m=+111.086712433" watchObservedRunningTime="2026-01-23 08:22:05.514692379 +0000 UTC m=+111.087648767" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.552920 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=90.552880601 podStartE2EDuration="1m30.552880601s" podCreationTimestamp="2026-01-23 08:20:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:05.551405674 +0000 UTC m=+111.124362062" watchObservedRunningTime="2026-01-23 08:22:05.552880601 +0000 UTC m=+111.125836969" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.553350 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qh4pc" podStartSLOduration=88.553343142 podStartE2EDuration="1m28.553343142s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:05.534328799 +0000 UTC m=+111.107285217" watchObservedRunningTime="2026-01-23 08:22:05.553343142 +0000 UTC m=+111.126299510" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.599332 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=36.599305247 podStartE2EDuration="36.599305247s" podCreationTimestamp="2026-01-23 08:21:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:05.571274659 +0000 UTC m=+111.144231047" watchObservedRunningTime="2026-01-23 08:22:05.599305247 +0000 UTC m=+111.172261615" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.605706 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.605750 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.605766 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.605786 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.605801 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:05Z","lastTransitionTime":"2026-01-23T08:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.614454 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=86.614410884 podStartE2EDuration="1m26.614410884s" podCreationTimestamp="2026-01-23 08:20:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:05.599271187 +0000 UTC m=+111.172227555" watchObservedRunningTime="2026-01-23 08:22:05.614410884 +0000 UTC m=+111.187367252" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.647784 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=90.647760335 podStartE2EDuration="1m30.647760335s" podCreationTimestamp="2026-01-23 08:20:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:05.646385831 +0000 UTC m=+111.219342199" watchObservedRunningTime="2026-01-23 08:22:05.647760335 +0000 UTC m=+111.220716693" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.708089 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.708137 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.708149 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.708165 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.708175 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:05Z","lastTransitionTime":"2026-01-23T08:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.727488 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-bkn9c" podStartSLOduration=89.727467772 podStartE2EDuration="1m29.727467772s" podCreationTimestamp="2026-01-23 08:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:05.716644962 +0000 UTC m=+111.289601330" watchObservedRunningTime="2026-01-23 08:22:05.727467772 +0000 UTC m=+111.300424150" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.738096 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=61.738074496 podStartE2EDuration="1m1.738074496s" podCreationTimestamp="2026-01-23 08:21:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:05.737660685 +0000 UTC m=+111.310617063" watchObservedRunningTime="2026-01-23 08:22:05.738074496 +0000 UTC m=+111.311030864" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.764478 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-955cc" podStartSLOduration=89.764455243 podStartE2EDuration="1m29.764455243s" podCreationTimestamp="2026-01-23 08:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:05.763851558 +0000 UTC m=+111.336807926" watchObservedRunningTime="2026-01-23 08:22:05.764455243 +0000 UTC m=+111.337411611" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.776746 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-vpxkq" podStartSLOduration=89.776716399 podStartE2EDuration="1m29.776716399s" podCreationTimestamp="2026-01-23 08:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:05.776305029 +0000 UTC m=+111.349261397" watchObservedRunningTime="2026-01-23 08:22:05.776716399 +0000 UTC m=+111.349672767" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.811012 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.811068 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.811086 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.811106 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.811122 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:05Z","lastTransitionTime":"2026-01-23T08:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.914451 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.914726 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.914813 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.914912 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:05 crc kubenswrapper[4711]: I0123 08:22:05.914977 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:05Z","lastTransitionTime":"2026-01-23T08:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:06 crc kubenswrapper[4711]: I0123 08:22:06.018180 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:06 crc kubenswrapper[4711]: I0123 08:22:06.018235 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:06 crc kubenswrapper[4711]: I0123 08:22:06.018247 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:06 crc kubenswrapper[4711]: I0123 08:22:06.018266 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:06 crc kubenswrapper[4711]: I0123 08:22:06.018279 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:06Z","lastTransitionTime":"2026-01-23T08:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 23 08:22:06 crc kubenswrapper[4711]: I0123 08:22:06.120786 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 23 08:22:06 crc kubenswrapper[4711]: I0123 08:22:06.120849 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 23 08:22:06 crc kubenswrapper[4711]: I0123 08:22:06.120872 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 23 08:22:06 crc kubenswrapper[4711]: I0123 08:22:06.120897 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 23 08:22:06 crc kubenswrapper[4711]: I0123 08:22:06.120910 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:06Z","lastTransitionTime":"2026-01-23T08:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 23 08:22:06 crc kubenswrapper[4711]: I0123 08:22:06.223601 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 23 08:22:06 crc kubenswrapper[4711]: I0123 08:22:06.223646 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 23 08:22:06 crc kubenswrapper[4711]: I0123 08:22:06.223657 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 23 08:22:06 crc kubenswrapper[4711]: I0123 08:22:06.223675 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 23 08:22:06 crc kubenswrapper[4711]: I0123 08:22:06.223687 4711 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-23T08:22:06Z","lastTransitionTime":"2026-01-23T08:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 23 08:22:06 crc kubenswrapper[4711]: I0123 08:22:06.264808 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 10:27:13.466505053 +0000 UTC
[the same node-status cycle (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, "Node became not ready") repeats every ~100 ms from 08:22:06.326 through 08:22:07.255]
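The "Node became not ready" entries above all serialize the same Ready condition payload. A minimal stdlib-only Go sketch of how such a condition object marshals; the struct mirrors the JSON keys visible in the log and is an illustrative stand-in, not the real k8s.io/api type:

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// nodeCondition mirrors the JSON keys in the "Node became not ready"
// entries above; hypothetical stand-in for the real k8s.io/api type.
type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	now := time.Now().UTC().Format(time.RFC3339)
	c := nodeCondition{
		Type:               "Ready",
		Status:             "False",
		LastHeartbeatTime:  now,
		LastTransitionTime: now,
		Reason:             "KubeletNotReady",
		Message: "container runtime network not ready: NetworkReady=false " +
			"reason:NetworkPluginNotReady message:Network plugin returns error: " +
			"no CNI configuration file in /etc/kubernetes/cni/net.d/. " +
			"Has your network provider started?",
	}
	b, _ := json.Marshal(c)
	fmt.Printf("condition=%s\n", b) // matches the condition={...} shape logged above
}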
Jan 23 08:22:07 crc kubenswrapper[4711]: I0123 08:22:07.265494 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 06:31:32.930115694 +0000 UTC
[the node-status cycle repeats at 08:22:07.359 and 08:22:07.461]
Jan 23 08:22:07 crc kubenswrapper[4711]: I0123 08:22:07.473494 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 23 08:22:07 crc kubenswrapper[4711]: I0123 08:22:07.473567 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 23 08:22:07 crc kubenswrapper[4711]: I0123 08:22:07.473617 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:22:07 crc kubenswrapper[4711]: I0123 08:22:07.473894 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 23 08:22:07 crc kubenswrapper[4711]: E0123 08:22:07.473977 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 23 08:22:07 crc kubenswrapper[4711]: E0123 08:22:07.474156 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 23 08:22:07 crc kubenswrapper[4711]: E0123 08:22:07.474211 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 23 08:22:07 crc kubenswrapper[4711]: E0123 08:22:07.474275 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545"
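Every failure above points at the same root cause the message names: no CNI configuration file in /etc/kubernetes/cni/net.d/. A hedged diagnostic sketch that checks that directory; the accepted extensions are an assumption about what CNI config loaders conventionally pick up, not taken from this log:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // the path named in the kubelet errors above
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Printf("cannot read %s: %v\n", dir, err)
		return
	}
	found := 0
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // assumed conventional CNI config extensions
			fmt.Println("found CNI config:", filepath.Join(dir, e.Name()))
			found++
		}
	}
	if found == 0 {
		fmt.Println("no CNI configuration files; consistent with the NetworkPluginNotReady errors above")
	}
}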
[the node-status cycle repeats every ~100 ms from 08:22:07.564 through 08:22:08.182]
Jan 23 08:22:08 crc kubenswrapper[4711]: I0123 08:22:08.265709 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 19:36:28.887930158 +0000 UTC
[the node-status cycle repeats every ~100 ms from 08:22:08.285 through 08:22:09.215]
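The certificate_manager entries recompute the rotation deadline on every probe, and the logged value jumps between November and January against a fixed 2026-02-24 expiry, which is consistent with a deadline jittered somewhere late in the certificate's lifetime. A sketch under that assumption; the 70-90% window and the issue date are hypothetical, not taken from the log:

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a random point late in the certificate's lifetime,
// so each recomputation yields a different deadline, as in the entries above.
// The [70%, 90%] jitter window is an assumption, not verified against kubelet source.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	notAfter, _ := time.Parse("2006-01-02 15:04:05", "2026-02-24 05:53:03")
	notBefore := notAfter.AddDate(0, 0, -365) // hypothetical issue date; not in the log
	for i := 0; i < 3; i++ {
		fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter).UTC())
	}
}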
Jan 23 08:22:09 crc kubenswrapper[4711]: I0123 08:22:09.265946 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 04:27:40.662039131 +0000 UTC
[the node-status cycle repeats at 08:22:09.317 and 08:22:09.421]
Jan 23 08:22:09 crc kubenswrapper[4711]: I0123 08:22:09.473147 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 23 08:22:09 crc kubenswrapper[4711]: I0123 08:22:09.473255 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 23 08:22:09 crc kubenswrapper[4711]: I0123 08:22:09.473298 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 23 08:22:09 crc kubenswrapper[4711]: E0123 08:22:09.473365 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 23 08:22:09 crc kubenswrapper[4711]: I0123 08:22:09.473256 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:22:09 crc kubenswrapper[4711]: E0123 08:22:09.473566 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 23 08:22:09 crc kubenswrapper[4711]: E0123 08:22:09.473655 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 23 08:22:09 crc kubenswrapper[4711]: E0123 08:22:09.473812 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545"
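Given how repetitive this excerpt is, a small triage sketch that collapses a kubelet log into per-message counts makes the signal easier to see. The parsing below keys on the first quoted string in each line and is a heuristic, not a klog parser:

package main

import (
	"bufio"
	"fmt"
	"os"
	"strings"
)

// Reads a kubelet log on stdin and counts occurrences of each distinct
// quoted message (e.g. "Node became not ready"). Triage aid only.
func main() {
	counts := map[string]int{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 1024*1024), 1024*1024) // entries above exceed the default token size
	for sc.Scan() {
		line := sc.Text()
		// Key on the first quoted string; fall back to the whole line.
		if i := strings.IndexByte(line, '"'); i >= 0 {
			if j := strings.IndexByte(line[i+1:], '"'); j >= 0 {
				counts[line[i+1:i+1+j]]++
				continue
			}
		}
		counts[line]++
	}
	for msg, n := range counts {
		fmt.Printf("%8d  %s\n", n, msg)
	}
}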
[the node-status cycle repeats every ~100 ms from 08:22:09.525 through 08:22:10.250]
Jan 23 08:22:10 crc kubenswrapper[4711]: I0123 08:22:10.266220 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 21:04:13.732074097 +0000 UTC
[the node-status cycle repeats every ~100 ms from 08:22:10.352 through 08:22:11.178]
Jan 23 08:22:11 crc kubenswrapper[4711]: I0123 08:22:11.266405 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 19:37:21.870969204 +0000 UTC
[... node-status cycles repeat: 08:22:11.281052, 08:22:11.384402 ...]
Jan 23 08:22:11 crc kubenswrapper[4711]: I0123 08:22:11.473352 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:22:11 crc kubenswrapper[4711]: I0123 08:22:11.473397 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 23 08:22:11 crc kubenswrapper[4711]: I0123 08:22:11.473446 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 23 08:22:11 crc kubenswrapper[4711]: I0123 08:22:11.473680 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 23 08:22:11 crc kubenswrapper[4711]: E0123 08:22:11.473691 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545"
Jan 23 08:22:11 crc kubenswrapper[4711]: E0123 08:22:11.474353 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 23 08:22:11 crc kubenswrapper[4711]: E0123 08:22:11.475051 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 23 08:22:11 crc kubenswrapper[4711]: E0123 08:22:11.475348 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
[... node-status cycle repeats: 08:22:11.486746 ...]
[... node-status cycles repeat: 08:22:11.589734, 08:22:11.693685, 08:22:11.796894, 08:22:11.899940, 08:22:12.003075, 08:22:12.106378, 08:22:12.209370 ...]
Jan 23 08:22:12 crc kubenswrapper[4711]: I0123 08:22:12.266809 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 19:10:54.8371696 +0000 UTC
[... node-status cycle repeats: 08:22:12.311773 ...]
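The certificate_manager entries print a fresh rotation deadline on every attempt, and each one here is already in the past, so the kubelet keeps retrying rotation. In client-go's certificate manager the deadline is drawn at a uniformly random point in the 70-90% band of the certificate's validity window, which is why every log line shows a different deadline for the same expiry. A stdlib-only sketch of that jitter (the 30-day validity below is an assumption for the demo; only the expiry comes from the log):

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a random instant in the [70%, 90%] band of the
// certificate's validity window, mirroring client-go's jittered deadline.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jitter := time.Duration((0.7 + 0.2*rand.Float64()) * float64(total))
	return notBefore.Add(jitter)
}

func main() {
	notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:53:03Z")
	notBefore := notAfter.Add(-30 * 24 * time.Hour) // assumed validity period
	for i := 0; i < 3; i++ {
		// A new draw each attempt, like the changing deadlines in the log.
		fmt.Println("rotation deadline is", rotationDeadline(notBefore, notAfter))
	}
}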
[... node-status cycles repeat: 08:22:12.414807, 08:22:12.517547, 08:22:12.620619, 08:22:12.723544, 08:22:12.827335, 08:22:12.931424, 08:22:13.034090, 08:22:13.138043, 08:22:13.240776 ...]
Jan 23 08:22:13 crc kubenswrapper[4711]: I0123 08:22:13.267589 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 21:58:27.060855937 +0000 UTC
[... node-status cycles repeat: 08:22:13.344981, 08:22:13.447375 ...]
Jan 23 08:22:13 crc kubenswrapper[4711]: I0123 08:22:13.473265 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:22:13 crc kubenswrapper[4711]: E0123 08:22:13.473401 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545"
Jan 23 08:22:13 crc kubenswrapper[4711]: I0123 08:22:13.473471 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 23 08:22:13 crc kubenswrapper[4711]: I0123 08:22:13.473400 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 23 08:22:13 crc kubenswrapper[4711]: I0123 08:22:13.473963 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 23 08:22:13 crc kubenswrapper[4711]: E0123 08:22:13.474042 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 23 08:22:13 crc kubenswrapper[4711]: E0123 08:22:13.474124 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 23 08:22:13 crc kubenswrapper[4711]: E0123 08:22:13.474168 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 23 08:22:13 crc kubenswrapper[4711]: I0123 08:22:13.474729 4711 scope.go:117] "RemoveContainer" containerID="677bbe8d1690ae3e49caee53eab8ca7d0c7cf53813f44d3c4d96260363dbb073"
[... node-status cycle repeats: 08:22:13.550669 ...]
[... node-status cycles repeat: 08:22:13.653827, 08:22:13.756550, 08:22:13.858923, 08:22:13.961045, 08:22:14.063389, 08:22:14.165581 ...]
Jan 23 08:22:14 crc kubenswrapper[4711]: I0123 08:22:14.268279 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 02:49:26.71155159 +0000 UTC
[... node-status cycle repeats: 08:22:14.268663 ...]
Jan 23 08:22:14 crc kubenswrapper[4711]: I0123 08:22:14.272059 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-zv6rd"]
Jan 23 08:22:14 crc kubenswrapper[4711]: I0123 08:22:14.282942 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jmffw_e16bfd0e-30fd-4fcf-865b-63400b88cff3/ovnkube-controller/3.log"
Jan 23 08:22:14 crc kubenswrapper[4711]: I0123 08:22:14.286379 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerStarted","Data":"2b9ff6ab4f162a1d535c08524bef44284e952c46441e4985ab3a72434482d09e"}
Jan 23 08:22:14 crc kubenswrapper[4711]: I0123 08:22:14.286464 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:22:14 crc kubenswrapper[4711]: E0123 08:22:14.286654 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545"
Jan 23 08:22:14 crc kubenswrapper[4711]: I0123 08:22:14.318257 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" podStartSLOduration=97.318236611 podStartE2EDuration="1m37.318236611s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:14.317321759 +0000 UTC m=+119.890278137" watchObservedRunningTime="2026-01-23 08:22:14.318236611 +0000 UTC m=+119.891192979"
[... node-status cycles repeat: 08:22:14.372283, 08:22:14.484952 ...]
[... node-status cycles repeat: 08:22:14.588348, 08:22:14.691381, 08:22:14.795140, 08:22:14.898819, 08:22:14.944899 ...]
Jan 23 08:22:14 crc kubenswrapper[4711]: I0123 08:22:14.993441 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p"]
Jan 23 08:22:14 crc kubenswrapper[4711]: I0123 08:22:14.993995 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p"
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p" Jan 23 08:22:14 crc kubenswrapper[4711]: I0123 08:22:14.995303 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 23 08:22:14 crc kubenswrapper[4711]: I0123 08:22:14.995784 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 23 08:22:14 crc kubenswrapper[4711]: I0123 08:22:14.996131 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 23 08:22:14 crc kubenswrapper[4711]: I0123 08:22:14.999258 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.090665 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/97057697-e608-4882-a3be-dcb4e961ada4-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-j564p\" (UID: \"97057697-e608-4882-a3be-dcb4e961ada4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p" Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.090733 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/97057697-e608-4882-a3be-dcb4e961ada4-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-j564p\" (UID: \"97057697-e608-4882-a3be-dcb4e961ada4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p" Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.090871 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/97057697-e608-4882-a3be-dcb4e961ada4-service-ca\") pod \"cluster-version-operator-5c965bbfc6-j564p\" (UID: \"97057697-e608-4882-a3be-dcb4e961ada4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p" Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.091019 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/97057697-e608-4882-a3be-dcb4e961ada4-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-j564p\" (UID: \"97057697-e608-4882-a3be-dcb4e961ada4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p" Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.091228 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/97057697-e608-4882-a3be-dcb4e961ada4-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-j564p\" (UID: \"97057697-e608-4882-a3be-dcb4e961ada4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p" Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.192705 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/97057697-e608-4882-a3be-dcb4e961ada4-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-j564p\" (UID: \"97057697-e608-4882-a3be-dcb4e961ada4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p" Jan 23 
08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.193075 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/97057697-e608-4882-a3be-dcb4e961ada4-service-ca\") pod \"cluster-version-operator-5c965bbfc6-j564p\" (UID: \"97057697-e608-4882-a3be-dcb4e961ada4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p" Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.192821 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/97057697-e608-4882-a3be-dcb4e961ada4-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-j564p\" (UID: \"97057697-e608-4882-a3be-dcb4e961ada4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p" Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.193146 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/97057697-e608-4882-a3be-dcb4e961ada4-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-j564p\" (UID: \"97057697-e608-4882-a3be-dcb4e961ada4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p" Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.193224 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/97057697-e608-4882-a3be-dcb4e961ada4-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-j564p\" (UID: \"97057697-e608-4882-a3be-dcb4e961ada4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p" Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.193268 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/97057697-e608-4882-a3be-dcb4e961ada4-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-j564p\" (UID: \"97057697-e608-4882-a3be-dcb4e961ada4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p" Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.193633 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/97057697-e608-4882-a3be-dcb4e961ada4-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-j564p\" (UID: \"97057697-e608-4882-a3be-dcb4e961ada4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p" Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.194150 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/97057697-e608-4882-a3be-dcb4e961ada4-service-ca\") pod \"cluster-version-operator-5c965bbfc6-j564p\" (UID: \"97057697-e608-4882-a3be-dcb4e961ada4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p" Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.199640 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/97057697-e608-4882-a3be-dcb4e961ada4-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-j564p\" (UID: \"97057697-e608-4882-a3be-dcb4e961ada4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p" Jan 23 08:22:15 crc kubenswrapper[4711]: E0123 08:22:15.205661 4711 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 23 08:22:15 
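The reconciler_common/operation_generator entries above trace the volume manager's reconcile loop for the CVO pod: each volume in the desired state is first verified as attached, then mounted, with "MountVolume.SetUp succeeded" logged once the actual state catches up. A heavily compressed sketch of that desired-vs-actual loop (types, states, and the one-step-per-pass behavior are simplified stand-ins, not the kubelet's implementation):

package main

import "fmt"

type volState int

const (
	unattached volState = iota
	attached
	mounted
)

// reconcile walks the desired volumes and advances each one step per pass,
// echoing the Verify -> Mount -> SetUp-succeeded progression in the log.
func reconcile(desired []string, actual map[string]volState) {
	for _, v := range desired {
		switch actual[v] {
		case unattached:
			fmt.Printf("operationExecutor.VerifyControllerAttachedVolume started for volume %q\n", v)
			actual[v] = attached
		case attached:
			fmt.Printf("operationExecutor.MountVolume started for volume %q\n", v)
			fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", v)
			actual[v] = mounted
		}
	}
}

func main() {
	desired := []string{"kube-api-access", "etc-cvo-updatepayloads", "service-ca", "serving-cert", "etc-ssl-certs"}
	actual := map[string]volState{}
	reconcile(desired, actual) // first pass: verify attach
	reconcile(desired, actual) // second pass: mount and set up
}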
crc kubenswrapper[4711]: I0123 08:22:15.211210 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/97057697-e608-4882-a3be-dcb4e961ada4-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-j564p\" (UID: \"97057697-e608-4882-a3be-dcb4e961ada4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p" Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.269400 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 17:14:44.354216709 +0000 UTC Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.269493 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.276302 4711 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.291761 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vpxkq_8cc803a0-2626-4444-b4b2-8e9567277d44/kube-multus/1.log" Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.292230 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vpxkq_8cc803a0-2626-4444-b4b2-8e9567277d44/kube-multus/0.log" Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.292312 4711 generic.go:334] "Generic (PLEG): container finished" podID="8cc803a0-2626-4444-b4b2-8e9567277d44" containerID="eafae4831c9cebb04dc1fe0259fa32717a44734f39508d8cd162ae212d1429fa" exitCode=1 Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.292353 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vpxkq" event={"ID":"8cc803a0-2626-4444-b4b2-8e9567277d44","Type":"ContainerDied","Data":"eafae4831c9cebb04dc1fe0259fa32717a44734f39508d8cd162ae212d1429fa"} Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.292397 4711 scope.go:117] "RemoveContainer" containerID="e338761dfdf8ece463a61451e2818e2c97cea03650979aa6722684820399f5e6" Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.293025 4711 scope.go:117] "RemoveContainer" containerID="eafae4831c9cebb04dc1fe0259fa32717a44734f39508d8cd162ae212d1429fa" Jan 23 08:22:15 crc kubenswrapper[4711]: E0123 08:22:15.293288 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-vpxkq_openshift-multus(8cc803a0-2626-4444-b4b2-8e9567277d44)\"" pod="openshift-multus/multus-vpxkq" podUID="8cc803a0-2626-4444-b4b2-8e9567277d44" Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.307828 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p" Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.473108 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.473220 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:22:15 crc kubenswrapper[4711]: I0123 08:22:15.474181 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:22:15 crc kubenswrapper[4711]: E0123 08:22:15.474173 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:22:15 crc kubenswrapper[4711]: E0123 08:22:15.474425 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:22:15 crc kubenswrapper[4711]: E0123 08:22:15.474337 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:22:15 crc kubenswrapper[4711]: E0123 08:22:15.550793 4711 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 23 08:22:16 crc kubenswrapper[4711]: I0123 08:22:16.297299 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p" event={"ID":"97057697-e608-4882-a3be-dcb4e961ada4","Type":"ContainerStarted","Data":"fff73c149f89477debf4317ab3639502055acba980a3cbe28991e6338dbd6c96"} Jan 23 08:22:16 crc kubenswrapper[4711]: I0123 08:22:16.297361 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p" event={"ID":"97057697-e608-4882-a3be-dcb4e961ada4","Type":"ContainerStarted","Data":"e1d275b915c565a6929c43a2a2098c57504825931f88bbbcdcddd5459ceeb59f"} Jan 23 08:22:16 crc kubenswrapper[4711]: I0123 08:22:16.299146 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vpxkq_8cc803a0-2626-4444-b4b2-8e9567277d44/kube-multus/1.log" Jan 23 08:22:16 crc kubenswrapper[4711]: I0123 08:22:16.473525 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:22:16 crc kubenswrapper[4711]: E0123 08:22:16.473728 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:22:17 crc kubenswrapper[4711]: I0123 08:22:17.473190 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:22:17 crc kubenswrapper[4711]: I0123 08:22:17.473238 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:22:17 crc kubenswrapper[4711]: I0123 08:22:17.473204 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:22:17 crc kubenswrapper[4711]: E0123 08:22:17.473365 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:22:17 crc kubenswrapper[4711]: E0123 08:22:17.473426 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:22:17 crc kubenswrapper[4711]: E0123 08:22:17.473618 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:22:18 crc kubenswrapper[4711]: I0123 08:22:18.472710 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:22:18 crc kubenswrapper[4711]: E0123 08:22:18.472885 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:22:19 crc kubenswrapper[4711]: I0123 08:22:19.473283 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:22:19 crc kubenswrapper[4711]: I0123 08:22:19.473342 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:22:19 crc kubenswrapper[4711]: I0123 08:22:19.473309 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:22:19 crc kubenswrapper[4711]: E0123 08:22:19.473483 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:22:19 crc kubenswrapper[4711]: E0123 08:22:19.473602 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:22:19 crc kubenswrapper[4711]: E0123 08:22:19.473702 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:22:20 crc kubenswrapper[4711]: I0123 08:22:20.473459 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:22:20 crc kubenswrapper[4711]: E0123 08:22:20.473698 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:22:20 crc kubenswrapper[4711]: E0123 08:22:20.551824 4711 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 23 08:22:21 crc kubenswrapper[4711]: I0123 08:22:21.473192 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:22:21 crc kubenswrapper[4711]: E0123 08:22:21.473406 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:22:21 crc kubenswrapper[4711]: I0123 08:22:21.473731 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:22:21 crc kubenswrapper[4711]: E0123 08:22:21.473908 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:22:21 crc kubenswrapper[4711]: I0123 08:22:21.474021 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:22:21 crc kubenswrapper[4711]: E0123 08:22:21.474111 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:22:21 crc kubenswrapper[4711]: I0123 08:22:21.946714 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:22:21 crc kubenswrapper[4711]: I0123 08:22:21.964073 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:22:21 crc kubenswrapper[4711]: I0123 08:22:21.991145 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-j564p" podStartSLOduration=105.991123678 podStartE2EDuration="1m45.991123678s" podCreationTimestamp="2026-01-23 08:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:16.313865793 +0000 UTC m=+121.886822171" watchObservedRunningTime="2026-01-23 08:22:21.991123678 +0000 UTC m=+127.564080046" Jan 23 08:22:22 crc kubenswrapper[4711]: I0123 08:22:22.473179 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:22:22 crc kubenswrapper[4711]: E0123 08:22:22.473373 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:22:23 crc kubenswrapper[4711]: I0123 08:22:23.473131 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:22:23 crc kubenswrapper[4711]: I0123 08:22:23.473196 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:22:23 crc kubenswrapper[4711]: I0123 08:22:23.473190 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:22:23 crc kubenswrapper[4711]: E0123 08:22:23.473332 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:22:23 crc kubenswrapper[4711]: E0123 08:22:23.473452 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:22:23 crc kubenswrapper[4711]: E0123 08:22:23.473724 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:22:24 crc kubenswrapper[4711]: I0123 08:22:24.473231 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:22:24 crc kubenswrapper[4711]: E0123 08:22:24.473416 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:22:25 crc kubenswrapper[4711]: I0123 08:22:25.472884 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:22:25 crc kubenswrapper[4711]: I0123 08:22:25.472974 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:22:25 crc kubenswrapper[4711]: E0123 08:22:25.474312 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:22:25 crc kubenswrapper[4711]: I0123 08:22:25.474445 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:22:25 crc kubenswrapper[4711]: E0123 08:22:25.474745 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:22:25 crc kubenswrapper[4711]: E0123 08:22:25.474858 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:22:25 crc kubenswrapper[4711]: E0123 08:22:25.552497 4711 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 23 08:22:26 crc kubenswrapper[4711]: I0123 08:22:26.472891 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:22:26 crc kubenswrapper[4711]: E0123 08:22:26.473139 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:22:27 crc kubenswrapper[4711]: I0123 08:22:27.473743 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:22:27 crc kubenswrapper[4711]: E0123 08:22:27.473886 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:22:27 crc kubenswrapper[4711]: I0123 08:22:27.474292 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:22:27 crc kubenswrapper[4711]: E0123 08:22:27.474358 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:22:27 crc kubenswrapper[4711]: I0123 08:22:27.474569 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:22:27 crc kubenswrapper[4711]: E0123 08:22:27.474629 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:22:28 crc kubenswrapper[4711]: I0123 08:22:28.473024 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:22:28 crc kubenswrapper[4711]: E0123 08:22:28.473183 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:22:29 crc kubenswrapper[4711]: I0123 08:22:29.473363 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:22:29 crc kubenswrapper[4711]: E0123 08:22:29.474109 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:22:29 crc kubenswrapper[4711]: I0123 08:22:29.473431 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:22:29 crc kubenswrapper[4711]: E0123 08:22:29.474315 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:22:29 crc kubenswrapper[4711]: I0123 08:22:29.473760 4711 scope.go:117] "RemoveContainer" containerID="eafae4831c9cebb04dc1fe0259fa32717a44734f39508d8cd162ae212d1429fa" Jan 23 08:22:29 crc kubenswrapper[4711]: I0123 08:22:29.473430 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:22:29 crc kubenswrapper[4711]: E0123 08:22:29.475156 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:22:30 crc kubenswrapper[4711]: I0123 08:22:30.351982 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vpxkq_8cc803a0-2626-4444-b4b2-8e9567277d44/kube-multus/1.log" Jan 23 08:22:30 crc kubenswrapper[4711]: I0123 08:22:30.352425 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vpxkq" event={"ID":"8cc803a0-2626-4444-b4b2-8e9567277d44","Type":"ContainerStarted","Data":"bc8d9ec28733822aa97c5e7621962019e13e1d9e7d871480a0272e63ddae78d3"} Jan 23 08:22:30 crc kubenswrapper[4711]: I0123 08:22:30.473617 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:22:30 crc kubenswrapper[4711]: E0123 08:22:30.473901 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:22:30 crc kubenswrapper[4711]: E0123 08:22:30.554409 4711 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 23 08:22:31 crc kubenswrapper[4711]: I0123 08:22:31.473081 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:22:31 crc kubenswrapper[4711]: E0123 08:22:31.473499 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:22:31 crc kubenswrapper[4711]: I0123 08:22:31.473713 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:22:31 crc kubenswrapper[4711]: I0123 08:22:31.473726 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:22:31 crc kubenswrapper[4711]: E0123 08:22:31.474034 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:22:31 crc kubenswrapper[4711]: E0123 08:22:31.474084 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:22:32 crc kubenswrapper[4711]: I0123 08:22:32.472997 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:22:32 crc kubenswrapper[4711]: E0123 08:22:32.473986 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:22:33 crc kubenswrapper[4711]: I0123 08:22:33.472835 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:22:33 crc kubenswrapper[4711]: I0123 08:22:33.472879 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:22:33 crc kubenswrapper[4711]: E0123 08:22:33.473076 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:22:33 crc kubenswrapper[4711]: I0123 08:22:33.473208 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:22:33 crc kubenswrapper[4711]: E0123 08:22:33.473335 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:22:33 crc kubenswrapper[4711]: E0123 08:22:33.473446 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:22:34 crc kubenswrapper[4711]: I0123 08:22:34.473245 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:22:34 crc kubenswrapper[4711]: E0123 08:22:34.473486 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zv6rd" podUID="f2bbf296-ae82-4cc3-b07d-bba10895a545" Jan 23 08:22:35 crc kubenswrapper[4711]: I0123 08:22:35.473742 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:22:35 crc kubenswrapper[4711]: I0123 08:22:35.473924 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:22:35 crc kubenswrapper[4711]: I0123 08:22:35.474226 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:22:35 crc kubenswrapper[4711]: E0123 08:22:35.475368 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 23 08:22:35 crc kubenswrapper[4711]: E0123 08:22:35.475433 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 23 08:22:35 crc kubenswrapper[4711]: E0123 08:22:35.475562 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 23 08:22:36 crc kubenswrapper[4711]: I0123 08:22:36.473734 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd" Jan 23 08:22:36 crc kubenswrapper[4711]: I0123 08:22:36.476930 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 23 08:22:36 crc kubenswrapper[4711]: I0123 08:22:36.478460 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 23 08:22:37 crc kubenswrapper[4711]: I0123 08:22:37.473674 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:22:37 crc kubenswrapper[4711]: I0123 08:22:37.473710 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:22:37 crc kubenswrapper[4711]: I0123 08:22:37.473696 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:22:37 crc kubenswrapper[4711]: I0123 08:22:37.476962 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 23 08:22:37 crc kubenswrapper[4711]: I0123 08:22:37.477134 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 23 08:22:37 crc kubenswrapper[4711]: I0123 08:22:37.477653 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 23 08:22:37 crc kubenswrapper[4711]: I0123 08:22:37.479534 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 23 08:22:43 crc kubenswrapper[4711]: I0123 08:22:43.998276 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:43 crc kubenswrapper[4711]: E0123 08:22:43.998602 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:24:45.998544665 +0000 UTC m=+271.571501033 (durationBeforeRetry 2m2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:44 crc kubenswrapper[4711]: I0123 08:22:44.099907 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:22:44 crc kubenswrapper[4711]: I0123 08:22:44.099982 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:22:44 crc kubenswrapper[4711]: I0123 08:22:44.100025 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:22:44 crc kubenswrapper[4711]: I0123 08:22:44.100076 4711 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:22:44 crc kubenswrapper[4711]: I0123 08:22:44.101154 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:22:44 crc kubenswrapper[4711]: I0123 08:22:44.105443 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:22:44 crc kubenswrapper[4711]: I0123 08:22:44.106026 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:22:44 crc kubenswrapper[4711]: I0123 08:22:44.106315 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:22:44 crc kubenswrapper[4711]: I0123 08:22:44.388328 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:22:44 crc kubenswrapper[4711]: I0123 08:22:44.396201 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 23 08:22:44 crc kubenswrapper[4711]: I0123 08:22:44.401945 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 23 08:22:44 crc kubenswrapper[4711]: W0123 08:22:44.624027 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-b1caffe799c9355d9ca05ea5cc6c0be566b15e7eb060614f237352987da19181 WatchSource:0}: Error finding container b1caffe799c9355d9ca05ea5cc6c0be566b15e7eb060614f237352987da19181: Status 404 returned error can't find the container with id b1caffe799c9355d9ca05ea5cc6c0be566b15e7eb060614f237352987da19181 Jan 23 08:22:44 crc kubenswrapper[4711]: W0123 08:22:44.856440 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-99e5394aba87445d48488b1210641cad35645b729e6f5516eded1c2488720985 WatchSource:0}: Error finding container 99e5394aba87445d48488b1210641cad35645b729e6f5516eded1c2488720985: Status 404 returned error can't find the container with id 99e5394aba87445d48488b1210641cad35645b729e6f5516eded1c2488720985 Jan 23 08:22:44 crc kubenswrapper[4711]: W0123 08:22:44.864771 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-daeab368711c8b86938fd8fd6bade114a2435a5c33a934e925c9162fae16a4d9 WatchSource:0}: Error finding container daeab368711c8b86938fd8fd6bade114a2435a5c33a934e925c9162fae16a4d9: Status 404 returned error can't find the container with id daeab368711c8b86938fd8fd6bade114a2435a5c33a934e925c9162fae16a4d9 Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.410954 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"c589a0c1ee769adaa8a96e47b812927becaacfbeaf70f204a8b7d48d6f3958ab"} Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.411219 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"b1caffe799c9355d9ca05ea5cc6c0be566b15e7eb060614f237352987da19181"} Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.411473 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.413457 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"5181a7ec69a97f08e6cf45da8660c1be413f5d1cde14a48a2de5075bfa95e606"} Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.413552 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"daeab368711c8b86938fd8fd6bade114a2435a5c33a934e925c9162fae16a4d9"} Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.414942 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" 
event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"c49ae09a79beaf5c3d99ba49a4e640b424f1516af7957c240e4d2ff2fb45fdc1"} Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.415006 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"99e5394aba87445d48488b1210641cad35645b729e6f5516eded1c2488720985"} Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.772459 4711 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.839497 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-7nvn6"] Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.840249 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.843445 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.843449 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.843496 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.843580 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.844141 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vsck7"] Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.845413 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.845546 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fzkjs"] Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.845608 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.845709 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7" Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.846192 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l"] Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.846402 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-fzkjs" Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.846497 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp"] Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.846825 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp" Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.846901 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.846967 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.847449 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-tjf8f"] Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.847682 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.847953 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tjf8f" Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.848345 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l" Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.848966 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-txv7j"] Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.852146 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-6rpqs"] Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.852430 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-f76vr"] Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.852577 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-txv7j" Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.852756 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-f76vr" Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.853007 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-6rpqs"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.858589 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.858748 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.859142 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.859307 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.859386 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.859641 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.859665 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.859801 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.859918 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.859957 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.859968 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.860029 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.860083 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.859973 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.860160 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.860225 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.860238 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.860295 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.860305 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.860625 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.860890 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.862896 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.863244 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.863360 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.863389 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.875470 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.875589 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.875671 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.875818 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.875668 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.875970 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.875989 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.876049 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.876114 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.876195 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.876228 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.876284 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.876356 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.876462 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.876536 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nrm2j"]
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.876614 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.876740 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.876865 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.876973 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.877105 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.877129 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nrm2j"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.877303 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.877496 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.877668 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-gxhxb"]
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.878285 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-gxhxb"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.879188 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.879317 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.880462 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-q5hjq"]
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.881348 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.882299 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-sk8zj"]
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.883151 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-sk8zj"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.884981 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9wx2k"]
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.898700 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9wx2k"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.906412 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.911396 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.935065 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cssg6"]
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.935760 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cssg6"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.938656 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q"]
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.938736 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.939481 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.939580 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9068fcb6-d02f-4175-97f8-34d5ea1389fc-trusted-ca\") pod \"console-operator-58897d9998-f76vr\" (UID: \"9068fcb6-d02f-4175-97f8-34d5ea1389fc\") " pod="openshift-console-operator/console-operator-58897d9998-f76vr"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.939619 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgf6g\" (UniqueName: \"kubernetes.io/projected/b328000b-5587-4645-a3b6-02397de51cf6-kube-api-access-sgf6g\") pod \"controller-manager-879f6c89f-vsck7\" (UID: \"b328000b-5587-4645-a3b6-02397de51cf6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.939644 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d4183796-cb3c-4979-8336-9e15ba18f37a-auth-proxy-config\") pod \"machine-approver-56656f9798-tjf8f\" (UID: \"d4183796-cb3c-4979-8336-9e15ba18f37a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tjf8f"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.939660 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3578bb73-066e-43b9-85db-a5989823d8d1-etcd-serving-ca\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.939675 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3578bb73-066e-43b9-85db-a5989823d8d1-node-pullsecrets\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.939689 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7905e1f-a5a4-449c-b393-1b8ee89493d6-config\") pod \"openshift-apiserver-operator-796bbdcf4f-txv7j\" (UID: \"d7905e1f-a5a4-449c-b393-1b8ee89493d6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-txv7j"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.939703 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3578bb73-066e-43b9-85db-a5989823d8d1-config\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.939717 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b328000b-5587-4645-a3b6-02397de51cf6-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-vsck7\" (UID: \"b328000b-5587-4645-a3b6-02397de51cf6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.939739 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/9afbdb97-d93a-494f-8ad0-23179afbee6d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fzkjs\" (UID: \"9afbdb97-d93a-494f-8ad0-23179afbee6d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fzkjs"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.939855 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9068fcb6-d02f-4175-97f8-34d5ea1389fc-serving-cert\") pod \"console-operator-58897d9998-f76vr\" (UID: \"9068fcb6-d02f-4175-97f8-34d5ea1389fc\") " pod="openshift-console-operator/console-operator-58897d9998-f76vr"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.939877 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/be88d29a-82f0-448c-bad5-545df560740b-encryption-config\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.939897 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/86145ace-e2d3-4b5b-9475-f52b19faa9df-client-ca\") pod \"route-controller-manager-6576b87f9c-kqstp\" (UID: \"86145ace-e2d3-4b5b-9475-f52b19faa9df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.939912 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/be88d29a-82f0-448c-bad5-545df560740b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.939926 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9afbdb97-d93a-494f-8ad0-23179afbee6d-images\") pod \"machine-api-operator-5694c8668f-fzkjs\" (UID: \"9afbdb97-d93a-494f-8ad0-23179afbee6d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fzkjs"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.939943 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9068fcb6-d02f-4175-97f8-34d5ea1389fc-config\") pod \"console-operator-58897d9998-f76vr\" (UID: \"9068fcb6-d02f-4175-97f8-34d5ea1389fc\") " pod="openshift-console-operator/console-operator-58897d9998-f76vr"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.939958 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86145ace-e2d3-4b5b-9475-f52b19faa9df-config\") pod \"route-controller-manager-6576b87f9c-kqstp\" (UID: \"86145ace-e2d3-4b5b-9475-f52b19faa9df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.939975 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72195d0a-6291-4d31-be15-10b066538f0e-config\") pod \"authentication-operator-69f744f599-6rpqs\" (UID: \"72195d0a-6291-4d31-be15-10b066538f0e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6rpqs"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.939992 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3578bb73-066e-43b9-85db-a5989823d8d1-trusted-ca-bundle\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.940066 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4lx5\" (UniqueName: \"kubernetes.io/projected/d4183796-cb3c-4979-8336-9e15ba18f37a-kube-api-access-x4lx5\") pod \"machine-approver-56656f9798-tjf8f\" (UID: \"d4183796-cb3c-4979-8336-9e15ba18f37a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tjf8f"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.940087 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/3578bb73-066e-43b9-85db-a5989823d8d1-audit\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.940104 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2c59f\" (UniqueName: \"kubernetes.io/projected/9068fcb6-d02f-4175-97f8-34d5ea1389fc-kube-api-access-2c59f\") pod \"console-operator-58897d9998-f76vr\" (UID: \"9068fcb6-d02f-4175-97f8-34d5ea1389fc\") " pod="openshift-console-operator/console-operator-58897d9998-f76vr"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.940165 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qm5t\" (UniqueName: \"kubernetes.io/projected/be88d29a-82f0-448c-bad5-545df560740b-kube-api-access-9qm5t\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.940186 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b328000b-5587-4645-a3b6-02397de51cf6-config\") pod \"controller-manager-879f6c89f-vsck7\" (UID: \"b328000b-5587-4645-a3b6-02397de51cf6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.940200 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3578bb73-066e-43b9-85db-a5989823d8d1-etcd-client\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.940249 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/be88d29a-82f0-448c-bad5-545df560740b-audit-policies\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.940267 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kpfq\" (UniqueName: \"kubernetes.io/projected/86145ace-e2d3-4b5b-9475-f52b19faa9df-kube-api-access-9kpfq\") pod \"route-controller-manager-6576b87f9c-kqstp\" (UID: \"86145ace-e2d3-4b5b-9475-f52b19faa9df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.940286 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4183796-cb3c-4979-8336-9e15ba18f37a-config\") pod \"machine-approver-56656f9798-tjf8f\" (UID: \"d4183796-cb3c-4979-8336-9e15ba18f37a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tjf8f"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.940301 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3578bb73-066e-43b9-85db-a5989823d8d1-audit-dir\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.940316 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/be88d29a-82f0-448c-bad5-545df560740b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.940332 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwz5w\" (UniqueName: \"kubernetes.io/projected/72195d0a-6291-4d31-be15-10b066538f0e-kube-api-access-kwz5w\") pod \"authentication-operator-69f744f599-6rpqs\" (UID: \"72195d0a-6291-4d31-be15-10b066538f0e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6rpqs"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.940425 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/be88d29a-82f0-448c-bad5-545df560740b-etcd-client\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.942453 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d4183796-cb3c-4979-8336-9e15ba18f37a-machine-approver-tls\") pod \"machine-approver-56656f9798-tjf8f\" (UID: \"d4183796-cb3c-4979-8336-9e15ba18f37a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tjf8f"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.942539 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/be88d29a-82f0-448c-bad5-545df560740b-audit-dir\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.942573 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/3578bb73-066e-43b9-85db-a5989823d8d1-image-import-ca\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.942600 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9bzb\" (UniqueName: \"kubernetes.io/projected/3578bb73-066e-43b9-85db-a5989823d8d1-kube-api-access-j9bzb\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.942624 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72195d0a-6291-4d31-be15-10b066538f0e-serving-cert\") pod \"authentication-operator-69f744f599-6rpqs\" (UID: \"72195d0a-6291-4d31-be15-10b066538f0e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6rpqs"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.942651 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b328000b-5587-4645-a3b6-02397de51cf6-client-ca\") pod \"controller-manager-879f6c89f-vsck7\" (UID: \"b328000b-5587-4645-a3b6-02397de51cf6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.942673 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4th8k\" (UniqueName: \"kubernetes.io/projected/9afbdb97-d93a-494f-8ad0-23179afbee6d-kube-api-access-4th8k\") pod \"machine-api-operator-5694c8668f-fzkjs\" (UID: \"9afbdb97-d93a-494f-8ad0-23179afbee6d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fzkjs"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.942695 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/86145ace-e2d3-4b5b-9475-f52b19faa9df-serving-cert\") pod \"route-controller-manager-6576b87f9c-kqstp\" (UID: \"86145ace-e2d3-4b5b-9475-f52b19faa9df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.942717 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/72195d0a-6291-4d31-be15-10b066538f0e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6rpqs\" (UID: \"72195d0a-6291-4d31-be15-10b066538f0e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6rpqs"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.942776 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d7905e1f-a5a4-449c-b393-1b8ee89493d6-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-txv7j\" (UID: \"d7905e1f-a5a4-449c-b393-1b8ee89493d6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-txv7j"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.942804 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/be88d29a-82f0-448c-bad5-545df560740b-serving-cert\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.942827 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/72195d0a-6291-4d31-be15-10b066538f0e-service-ca-bundle\") pod \"authentication-operator-69f744f599-6rpqs\" (UID: \"72195d0a-6291-4d31-be15-10b066538f0e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6rpqs"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.942847 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3578bb73-066e-43b9-85db-a5989823d8d1-serving-cert\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.942876 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gprkq\" (UniqueName: \"kubernetes.io/projected/d7905e1f-a5a4-449c-b393-1b8ee89493d6-kube-api-access-gprkq\") pod \"openshift-apiserver-operator-796bbdcf4f-txv7j\" (UID: \"d7905e1f-a5a4-449c-b393-1b8ee89493d6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-txv7j"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.942901 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9afbdb97-d93a-494f-8ad0-23179afbee6d-config\") pod \"machine-api-operator-5694c8668f-fzkjs\" (UID: \"9afbdb97-d93a-494f-8ad0-23179afbee6d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fzkjs"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.942925 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3578bb73-066e-43b9-85db-a5989823d8d1-encryption-config\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.942948 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b328000b-5587-4645-a3b6-02397de51cf6-serving-cert\") pod \"controller-manager-879f6c89f-vsck7\" (UID: \"b328000b-5587-4645-a3b6-02397de51cf6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.946863 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.947029 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-7nvn6"]
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.948524 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-p5sq6"]
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.948796 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.949120 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-p5sq6"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.949440 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-z4ppj"]
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.949933 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-z4ppj"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.954048 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.954248 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw7wx"]
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.961723 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.962210 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.964747 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw7wx"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.973022 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fzkjs"]
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.973167 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9wx2k"]
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.975456 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vsck7"]
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.976556 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.976963 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.976982 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.977200 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.977241 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.977452 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.977692 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.985368 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-f76vr"]
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.985477 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-v2m5p"]
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.986314 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.986521 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.986525 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.986547 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-nb2x5"]
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.986615 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.986699 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.986826 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.987080 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-v2m5p"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.988417 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-nb2x5"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.988972 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.989097 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.989216 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.989343 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.989353 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.989396 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.989468 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.989517 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.989540 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.989240 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.989252 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.989258 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.989300 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.989304 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.989672 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.992699 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.992980 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.993276 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.993320 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.994358 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.994662 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xcw55"]
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.995550 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xcw55"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.996764 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-zmvtv"]
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.997756 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Jan 23 08:22:45 crc kubenswrapper[4711]: I0123 08:22:45.998861 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zmvtv"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:45.999986 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-gtl2r"]
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.000539 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-gtl2r"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.006652 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.006868 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-mrmw6"]
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.011351 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n"]
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.011952 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mrmw6"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.026569 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh"]
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.027293 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-zg5c9"]
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.027644 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.027795 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.028105 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.028169 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.028286 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.028849 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.029797 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nrm2j"]
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.030364 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.033054 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-6rpqs"]
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.033524 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-58wxx"]
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.034333 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.035142 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.035840 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.037583 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rhrsx"]
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.038318 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.044970 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.045779 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkrcc"]
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.046687 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-s6xwh"]
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.047235 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-s6xwh"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.047598 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkrcc"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.047232 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/be88d29a-82f0-448c-bad5-545df560740b-serving-cert\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.047792 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/72195d0a-6291-4d31-be15-10b066538f0e-service-ca-bundle\") pod \"authentication-operator-69f744f599-6rpqs\" (UID: \"72195d0a-6291-4d31-be15-10b066538f0e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6rpqs"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.047819 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99ae132d-a196-46f2-aae4-910e03935ee9-config\") pod \"kube-apiserver-operator-766d6c64bb-p5sq6\" (UID: \"99ae132d-a196-46f2-aae4-910e03935ee9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-p5sq6"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.047841 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d662cd8f-d996-413a-89c8-559898662622-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-9wx2k\" (UID: \"d662cd8f-d996-413a-89c8-559898662622\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9wx2k"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.047885 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3578bb73-066e-43b9-85db-a5989823d8d1-serving-cert\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.047909 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccd74590-a19d-4c99-bf60-59f66e85a484-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-cssg6\" (UID: \"ccd74590-a19d-4c99-bf60-59f66e85a484\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cssg6"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.047931 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzkp8\" (UniqueName: \"kubernetes.io/projected/be1b64cc-b8d5-429c-8189-542268f1d7a2-kube-api-access-tzkp8\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.047956 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3578bb73-066e-43b9-85db-a5989823d8d1-encryption-config\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.047977 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gprkq\" (UniqueName: \"kubernetes.io/projected/d7905e1f-a5a4-449c-b393-1b8ee89493d6-kube-api-access-gprkq\") pod \"openshift-apiserver-operator-796bbdcf4f-txv7j\" (UID: \"d7905e1f-a5a4-449c-b393-1b8ee89493d6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-txv7j"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.047997 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9afbdb97-d93a-494f-8ad0-23179afbee6d-config\") pod \"machine-api-operator-5694c8668f-fzkjs\" (UID: \"9afbdb97-d93a-494f-8ad0-23179afbee6d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fzkjs"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048022 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b328000b-5587-4645-a3b6-02397de51cf6-serving-cert\") pod \"controller-manager-879f6c89f-vsck7\" (UID: \"b328000b-5587-4645-a3b6-02397de51cf6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048068 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048102 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9068fcb6-d02f-4175-97f8-34d5ea1389fc-trusted-ca\") pod \"console-operator-58897d9998-f76vr\" (UID: \"9068fcb6-d02f-4175-97f8-34d5ea1389fc\") " pod="openshift-console-operator/console-operator-58897d9998-f76vr"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048124 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgf6g\" (UniqueName: \"kubernetes.io/projected/b328000b-5587-4645-a3b6-02397de51cf6-kube-api-access-sgf6g\") pod \"controller-manager-879f6c89f-vsck7\" (UID: \"b328000b-5587-4645-a3b6-02397de51cf6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048149 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a44c144e-22c9-44be-88bd-408c82ed0e0a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-z4ppj\" (UID: \"a44c144e-22c9-44be-88bd-408c82ed0e0a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-z4ppj"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048173 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d662cd8f-d996-413a-89c8-559898662622-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-9wx2k\" (UID: \"d662cd8f-d996-413a-89c8-559898662622\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9wx2k"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048203 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d4183796-cb3c-4979-8336-9e15ba18f37a-auth-proxy-config\") pod \"machine-approver-56656f9798-tjf8f\" (UID: \"d4183796-cb3c-4979-8336-9e15ba18f37a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tjf8f"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048227 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3578bb73-066e-43b9-85db-a5989823d8d1-etcd-serving-ca\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048253 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df27a7ac-56ad-458a-8954-4177f65db5ac-serving-cert\") pod \"openshift-config-operator-7777fb866f-g2w8q\" (UID: \"df27a7ac-56ad-458a-8954-4177f65db5ac\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048278 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3578bb73-066e-43b9-85db-a5989823d8d1-node-pullsecrets\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048304 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7905e1f-a5a4-449c-b393-1b8ee89493d6-config\") pod \"openshift-apiserver-operator-796bbdcf4f-txv7j\" (UID: \"d7905e1f-a5a4-449c-b393-1b8ee89493d6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-txv7j"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048328 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a7e00bfd-844d-4264-aff6-d2bdb6673084-console-oauth-config\") pod \"console-f9d7485db-sk8zj\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " pod="openshift-console/console-f9d7485db-sk8zj"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048350 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pzpw\" (UniqueName: \"kubernetes.io/projected/a7e00bfd-844d-4264-aff6-d2bdb6673084-kube-api-access-4pzpw\") pod \"console-f9d7485db-sk8zj\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " pod="openshift-console/console-f9d7485db-sk8zj"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048375 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3578bb73-066e-43b9-85db-a5989823d8d1-config\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048399 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b328000b-5587-4645-a3b6-02397de51cf6-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-vsck7\" (UID: \"b328000b-5587-4645-a3b6-02397de51cf6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048423 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/9afbdb97-d93a-494f-8ad0-23179afbee6d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fzkjs\" (UID: \"9afbdb97-d93a-494f-8ad0-23179afbee6d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fzkjs"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048461 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9068fcb6-d02f-4175-97f8-34d5ea1389fc-serving-cert\") pod \"console-operator-58897d9998-f76vr\" (UID: \"9068fcb6-d02f-4175-97f8-34d5ea1389fc\") " pod="openshift-console-operator/console-operator-58897d9998-f76vr"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048488 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/be88d29a-82f0-448c-bad5-545df560740b-encryption-config\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048534 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a44c144e-22c9-44be-88bd-408c82ed0e0a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-z4ppj\" (UID: \"a44c144e-22c9-44be-88bd-408c82ed0e0a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-z4ppj"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048561 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mftqn\" (UniqueName: \"kubernetes.io/projected/ce0cb782-5b0e-4dfd-8c39-8ad6d6787d58-kube-api-access-mftqn\") pod \"kube-storage-version-migrator-operator-b67b599dd-zw7wx\" (UID: \"ce0cb782-5b0e-4dfd-8c39-8ad6d6787d58\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw7wx"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048586 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/99ae132d-a196-46f2-aae4-910e03935ee9-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-p5sq6\" (UID: \"99ae132d-a196-46f2-aae4-910e03935ee9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-p5sq6"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048613 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/86145ace-e2d3-4b5b-9475-f52b19faa9df-client-ca\") pod \"route-controller-manager-6576b87f9c-kqstp\" (UID: \"86145ace-e2d3-4b5b-9475-f52b19faa9df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048638 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048675 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/be88d29a-82f0-448c-bad5-545df560740b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048700 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9afbdb97-d93a-494f-8ad0-23179afbee6d-images\") pod \"machine-api-operator-5694c8668f-fzkjs\" (UID: \"9afbdb97-d93a-494f-8ad0-23179afbee6d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fzkjs"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048722 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-trusted-ca-bundle\") pod \"console-f9d7485db-sk8zj\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " pod="openshift-console/console-f9d7485db-sk8zj"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048745 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9068fcb6-d02f-4175-97f8-34d5ea1389fc-config\") pod \"console-operator-58897d9998-f76vr\" (UID: \"9068fcb6-d02f-4175-97f8-34d5ea1389fc\") " pod="openshift-console-operator/console-operator-58897d9998-f76vr"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048767 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048791 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86145ace-e2d3-4b5b-9475-f52b19faa9df-config\") pod \"route-controller-manager-6576b87f9c-kqstp\" (UID: \"86145ace-e2d3-4b5b-9475-f52b19faa9df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048814 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048842 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3578bb73-066e-43b9-85db-a5989823d8d1-trusted-ca-bundle\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048861 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72195d0a-6291-4d31-be15-10b066538f0e-config\") pod \"authentication-operator-69f744f599-6rpqs\" (UID: \"72195d0a-6291-4d31-be15-10b066538f0e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6rpqs"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048881 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpwvd\" (UniqueName: \"kubernetes.io/projected/341a7457-1e1c-4f8c-81b8-850161798640-kube-api-access-hpwvd\") pod \"cluster-samples-operator-665b6dd947-nrm2j\" (UID: \"341a7457-1e1c-4f8c-81b8-850161798640\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nrm2j"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048904 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4lx5\" (UniqueName: \"kubernetes.io/projected/d4183796-cb3c-4979-8336-9e15ba18f37a-kube-api-access-x4lx5\") pod \"machine-approver-56656f9798-tjf8f\" (UID: \"d4183796-cb3c-4979-8336-9e15ba18f37a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tjf8f"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048925 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mz88h\" (UniqueName: \"kubernetes.io/projected/9b990ad9-6046-4ef0-bf53-1a5a74c9d0d8-kube-api-access-mz88h\") pod \"downloads-7954f5f757-gxhxb\" (UID: \"9b990ad9-6046-4ef0-bf53-1a5a74c9d0d8\") " pod="openshift-console/downloads-7954f5f757-gxhxb"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048948 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/3578bb73-066e-43b9-85db-a5989823d8d1-audit\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048971 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4z2rd\" (UniqueName: \"kubernetes.io/projected/d662cd8f-d996-413a-89c8-559898662622-kube-api-access-4z2rd\") pod \"cluster-image-registry-operator-dc59b4c8b-9wx2k\" (UID: \"d662cd8f-d996-413a-89c8-559898662622\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9wx2k"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.048992 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce0cb782-5b0e-4dfd-8c39-8ad6d6787d58-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-zw7wx\" (UID: \"ce0cb782-5b0e-4dfd-8c39-8ad6d6787d58\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw7wx"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.049014 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2c59f\" (UniqueName: \"kubernetes.io/projected/9068fcb6-d02f-4175-97f8-34d5ea1389fc-kube-api-access-2c59f\") pod \"console-operator-58897d9998-f76vr\" (UID: \"9068fcb6-d02f-4175-97f8-34d5ea1389fc\") " pod="openshift-console-operator/console-operator-58897d9998-f76vr"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.049039 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a7e00bfd-844d-4264-aff6-d2bdb6673084-console-serving-cert\") pod \"console-f9d7485db-sk8zj\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " pod="openshift-console/console-f9d7485db-sk8zj"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.049060 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.049085 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qm5t\" (UniqueName: \"kubernetes.io/projected/be88d29a-82f0-448c-bad5-545df560740b-kube-api-access-9qm5t\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.049104 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/df27a7ac-56ad-458a-8954-4177f65db5ac-available-featuregates\") pod \"openshift-config-operator-7777fb866f-g2w8q\" (UID: \"df27a7ac-56ad-458a-8954-4177f65db5ac\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.049123 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq"
Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.049146 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-audit-policies\") pod
\"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.049166 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.049187 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ce0cb782-5b0e-4dfd-8c39-8ad6d6787d58-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-zw7wx\" (UID: \"ce0cb782-5b0e-4dfd-8c39-8ad6d6787d58\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw7wx" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.049211 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b328000b-5587-4645-a3b6-02397de51cf6-config\") pod \"controller-manager-879f6c89f-vsck7\" (UID: \"b328000b-5587-4645-a3b6-02397de51cf6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.049232 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99ae132d-a196-46f2-aae4-910e03935ee9-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-p5sq6\" (UID: \"99ae132d-a196-46f2-aae4-910e03935ee9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-p5sq6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.049251 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.049276 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3578bb73-066e-43b9-85db-a5989823d8d1-etcd-client\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.049295 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-service-ca\") pod \"console-f9d7485db-sk8zj\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.049317 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svz8b\" (UniqueName: \"kubernetes.io/projected/df27a7ac-56ad-458a-8954-4177f65db5ac-kube-api-access-svz8b\") pod 
\"openshift-config-operator-7777fb866f-g2w8q\" (UID: \"df27a7ac-56ad-458a-8954-4177f65db5ac\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.049339 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a44c144e-22c9-44be-88bd-408c82ed0e0a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-z4ppj\" (UID: \"a44c144e-22c9-44be-88bd-408c82ed0e0a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-z4ppj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.049386 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/be88d29a-82f0-448c-bad5-545df560740b-audit-policies\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.051644 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/be88d29a-82f0-448c-bad5-545df560740b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.051695 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kpfq\" (UniqueName: \"kubernetes.io/projected/86145ace-e2d3-4b5b-9475-f52b19faa9df-kube-api-access-9kpfq\") pod \"route-controller-manager-6576b87f9c-kqstp\" (UID: \"86145ace-e2d3-4b5b-9475-f52b19faa9df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.051716 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/341a7457-1e1c-4f8c-81b8-850161798640-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-nrm2j\" (UID: \"341a7457-1e1c-4f8c-81b8-850161798640\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nrm2j" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.051733 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ccd74590-a19d-4c99-bf60-59f66e85a484-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-cssg6\" (UID: \"ccd74590-a19d-4c99-bf60-59f66e85a484\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cssg6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.051753 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4183796-cb3c-4979-8336-9e15ba18f37a-config\") pod \"machine-approver-56656f9798-tjf8f\" (UID: \"d4183796-cb3c-4979-8336-9e15ba18f37a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tjf8f" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.051768 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3578bb73-066e-43b9-85db-a5989823d8d1-audit-dir\") pod \"apiserver-76f77b778f-7nvn6\" (UID: 
\"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.051764 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d4183796-cb3c-4979-8336-9e15ba18f37a-auth-proxy-config\") pod \"machine-approver-56656f9798-tjf8f\" (UID: \"d4183796-cb3c-4979-8336-9e15ba18f37a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tjf8f" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.051788 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/be88d29a-82f0-448c-bad5-545df560740b-etcd-client\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.051854 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/be88d29a-82f0-448c-bad5-545df560740b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.051893 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwz5w\" (UniqueName: \"kubernetes.io/projected/72195d0a-6291-4d31-be15-10b066538f0e-kube-api-access-kwz5w\") pod \"authentication-operator-69f744f599-6rpqs\" (UID: \"72195d0a-6291-4d31-be15-10b066538f0e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6rpqs" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.051933 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/be88d29a-82f0-448c-bad5-545df560740b-audit-dir\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.051947 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/86145ace-e2d3-4b5b-9475-f52b19faa9df-client-ca\") pod \"route-controller-manager-6576b87f9c-kqstp\" (UID: \"86145ace-e2d3-4b5b-9475-f52b19faa9df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.051962 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/be1b64cc-b8d5-429c-8189-542268f1d7a2-audit-dir\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.051957 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3578bb73-066e-43b9-85db-a5989823d8d1-etcd-serving-ca\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.051988 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d4183796-cb3c-4979-8336-9e15ba18f37a-machine-approver-tls\") pod \"machine-approver-56656f9798-tjf8f\" (UID: \"d4183796-cb3c-4979-8336-9e15ba18f37a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tjf8f" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.052019 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/3578bb73-066e-43b9-85db-a5989823d8d1-image-import-ca\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.052042 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9bzb\" (UniqueName: \"kubernetes.io/projected/3578bb73-066e-43b9-85db-a5989823d8d1-kube-api-access-j9bzb\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.052067 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72195d0a-6291-4d31-be15-10b066538f0e-serving-cert\") pod \"authentication-operator-69f744f599-6rpqs\" (UID: \"72195d0a-6291-4d31-be15-10b066538f0e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6rpqs" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.052092 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.052125 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/d662cd8f-d996-413a-89c8-559898662622-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-9wx2k\" (UID: \"d662cd8f-d996-413a-89c8-559898662622\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9wx2k" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.052149 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-oauth-serving-cert\") pod \"console-f9d7485db-sk8zj\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.052173 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b328000b-5587-4645-a3b6-02397de51cf6-client-ca\") pod \"controller-manager-879f6c89f-vsck7\" (UID: \"b328000b-5587-4645-a3b6-02397de51cf6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.052212 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4th8k\" (UniqueName: 
\"kubernetes.io/projected/9afbdb97-d93a-494f-8ad0-23179afbee6d-kube-api-access-4th8k\") pod \"machine-api-operator-5694c8668f-fzkjs\" (UID: \"9afbdb97-d93a-494f-8ad0-23179afbee6d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fzkjs" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.052251 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/86145ace-e2d3-4b5b-9475-f52b19faa9df-serving-cert\") pod \"route-controller-manager-6576b87f9c-kqstp\" (UID: \"86145ace-e2d3-4b5b-9475-f52b19faa9df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.052278 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/72195d0a-6291-4d31-be15-10b066538f0e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6rpqs\" (UID: \"72195d0a-6291-4d31-be15-10b066538f0e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6rpqs" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.052309 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-console-config\") pod \"console-f9d7485db-sk8zj\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.052340 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d7905e1f-a5a4-449c-b393-1b8ee89493d6-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-txv7j\" (UID: \"d7905e1f-a5a4-449c-b393-1b8ee89493d6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-txv7j" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.052363 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wm622\" (UniqueName: \"kubernetes.io/projected/ccd74590-a19d-4c99-bf60-59f66e85a484-kube-api-access-wm622\") pod \"openshift-controller-manager-operator-756b6f6bc6-cssg6\" (UID: \"ccd74590-a19d-4c99-bf60-59f66e85a484\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cssg6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.052388 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.052412 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.053790 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/3578bb73-066e-43b9-85db-a5989823d8d1-image-import-ca\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.055623 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/be88d29a-82f0-448c-bad5-545df560740b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.055644 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/72195d0a-6291-4d31-be15-10b066538f0e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6rpqs\" (UID: \"72195d0a-6291-4d31-be15-10b066538f0e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6rpqs" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.055683 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/be88d29a-82f0-448c-bad5-545df560740b-audit-policies\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.055979 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-7bjqm"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.056797 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9afbdb97-d93a-494f-8ad0-23179afbee6d-images\") pod \"machine-api-operator-5694c8668f-fzkjs\" (UID: \"9afbdb97-d93a-494f-8ad0-23179afbee6d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fzkjs" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.056816 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-7bjqm" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.056845 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/be88d29a-82f0-448c-bad5-545df560740b-audit-dir\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.057067 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/be88d29a-82f0-448c-bad5-545df560740b-serving-cert\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.057669 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/be88d29a-82f0-448c-bad5-545df560740b-etcd-client\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.057970 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86145ace-e2d3-4b5b-9475-f52b19faa9df-config\") pod \"route-controller-manager-6576b87f9c-kqstp\" (UID: \"86145ace-e2d3-4b5b-9475-f52b19faa9df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.057980 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b328000b-5587-4645-a3b6-02397de51cf6-config\") pod \"controller-manager-879f6c89f-vsck7\" (UID: \"b328000b-5587-4645-a3b6-02397de51cf6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.058485 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3578bb73-066e-43b9-85db-a5989823d8d1-audit-dir\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.058802 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9068fcb6-d02f-4175-97f8-34d5ea1389fc-trusted-ca\") pod \"console-operator-58897d9998-f76vr\" (UID: \"9068fcb6-d02f-4175-97f8-34d5ea1389fc\") " pod="openshift-console-operator/console-operator-58897d9998-f76vr" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.058889 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4183796-cb3c-4979-8336-9e15ba18f37a-config\") pod \"machine-approver-56656f9798-tjf8f\" (UID: \"d4183796-cb3c-4979-8336-9e15ba18f37a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tjf8f" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.059046 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b328000b-5587-4645-a3b6-02397de51cf6-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-vsck7\" (UID: 
\"b328000b-5587-4645-a3b6-02397de51cf6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.059068 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3578bb73-066e-43b9-85db-a5989823d8d1-config\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.059234 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9068fcb6-d02f-4175-97f8-34d5ea1389fc-config\") pod \"console-operator-58897d9998-f76vr\" (UID: \"9068fcb6-d02f-4175-97f8-34d5ea1389fc\") " pod="openshift-console-operator/console-operator-58897d9998-f76vr" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.059688 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7905e1f-a5a4-449c-b393-1b8ee89493d6-config\") pod \"openshift-apiserver-operator-796bbdcf4f-txv7j\" (UID: \"d7905e1f-a5a4-449c-b393-1b8ee89493d6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-txv7j" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.059696 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9afbdb97-d93a-494f-8ad0-23179afbee6d-config\") pod \"machine-api-operator-5694c8668f-fzkjs\" (UID: \"9afbdb97-d93a-494f-8ad0-23179afbee6d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fzkjs" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.060035 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3578bb73-066e-43b9-85db-a5989823d8d1-trusted-ca-bundle\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.060189 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/72195d0a-6291-4d31-be15-10b066538f0e-service-ca-bundle\") pod \"authentication-operator-69f744f599-6rpqs\" (UID: \"72195d0a-6291-4d31-be15-10b066538f0e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6rpqs" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.060608 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3578bb73-066e-43b9-85db-a5989823d8d1-etcd-client\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.060857 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72195d0a-6291-4d31-be15-10b066538f0e-serving-cert\") pod \"authentication-operator-69f744f599-6rpqs\" (UID: \"72195d0a-6291-4d31-be15-10b066538f0e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6rpqs" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.061260 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/b328000b-5587-4645-a3b6-02397de51cf6-client-ca\") pod \"controller-manager-879f6c89f-vsck7\" (UID: \"b328000b-5587-4645-a3b6-02397de51cf6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.061614 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d7905e1f-a5a4-449c-b393-1b8ee89493d6-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-txv7j\" (UID: \"d7905e1f-a5a4-449c-b393-1b8ee89493d6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-txv7j" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.061664 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3578bb73-066e-43b9-85db-a5989823d8d1-node-pullsecrets\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.061801 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/3578bb73-066e-43b9-85db-a5989823d8d1-audit\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.062547 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.063055 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/86145ace-e2d3-4b5b-9475-f52b19faa9df-serving-cert\") pod \"route-controller-manager-6576b87f9c-kqstp\" (UID: \"86145ace-e2d3-4b5b-9475-f52b19faa9df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.063456 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/be88d29a-82f0-448c-bad5-545df560740b-encryption-config\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.063697 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d4183796-cb3c-4979-8336-9e15ba18f37a-machine-approver-tls\") pod \"machine-approver-56656f9798-tjf8f\" (UID: \"d4183796-cb3c-4979-8336-9e15ba18f37a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tjf8f" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.064720 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-z87tl"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.065068 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3578bb73-066e-43b9-85db-a5989823d8d1-encryption-config\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.065441 4711 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-z87tl" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.066769 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-hn45s"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.068113 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5mwhk"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.068617 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-hn45s" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.068786 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5mwhk" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.068641 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b328000b-5587-4645-a3b6-02397de51cf6-serving-cert\") pod \"controller-manager-879f6c89f-vsck7\" (UID: \"b328000b-5587-4645-a3b6-02397de51cf6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.070887 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72195d0a-6291-4d31-be15-10b066538f0e-config\") pod \"authentication-operator-69f744f599-6rpqs\" (UID: \"72195d0a-6291-4d31-be15-10b066538f0e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6rpqs" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.072783 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3578bb73-066e-43b9-85db-a5989823d8d1-serving-cert\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.072861 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-wl6fc"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.073750 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-wl6fc" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.074713 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9068fcb6-d02f-4175-97f8-34d5ea1389fc-serving-cert\") pod \"console-operator-58897d9998-f76vr\" (UID: \"9068fcb6-d02f-4175-97f8-34d5ea1389fc\") " pod="openshift-console-operator/console-operator-58897d9998-f76vr" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.074782 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.075521 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.079434 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/9afbdb97-d93a-494f-8ad0-23179afbee6d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fzkjs\" (UID: \"9afbdb97-d93a-494f-8ad0-23179afbee6d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fzkjs" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.081156 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.082322 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.082807 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-p9lxv"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.083592 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p9lxv" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.084296 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-txv7j"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.085864 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-q5hjq"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.087556 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-sk8zj"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.089165 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-z4ppj"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.090570 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-zmvtv"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.097955 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xcw55"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.104056 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.104341 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.108171 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.109684 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-p5sq6"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.110819 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-nb2x5"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.112133 4711 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cssg6"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.113228 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-zg5c9"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.114297 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-mrmw6"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.115328 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.116352 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-gxhxb"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.117430 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw7wx"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.118764 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.119792 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-s6xwh"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.120813 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-gtl2r"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.121880 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-58wxx"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.122412 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.123055 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-z87tl"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.124200 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-nd7dc"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.125133 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-nd7dc" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.125370 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.126203 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-wl6fc"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.127274 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rhrsx"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.128525 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkrcc"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.129913 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-hn45s"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.131261 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-p9lxv"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.132578 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-7bjqm"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.133668 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5mwhk"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.134691 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-nd7dc"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.135986 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-r9c8g"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.138546 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-vrl4r"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.138693 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-r9c8g" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.139327 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-r9c8g"] Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.139397 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-vrl4r" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.142289 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153175 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/be1b64cc-b8d5-429c-8189-542268f1d7a2-audit-dir\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153224 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-oauth-serving-cert\") pod \"console-f9d7485db-sk8zj\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153245 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153262 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/d662cd8f-d996-413a-89c8-559898662622-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-9wx2k\" (UID: \"d662cd8f-d996-413a-89c8-559898662622\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9wx2k" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153310 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-console-config\") pod \"console-f9d7485db-sk8zj\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153331 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wm622\" (UniqueName: \"kubernetes.io/projected/ccd74590-a19d-4c99-bf60-59f66e85a484-kube-api-access-wm622\") pod \"openshift-controller-manager-operator-756b6f6bc6-cssg6\" (UID: \"ccd74590-a19d-4c99-bf60-59f66e85a484\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cssg6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153347 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153364 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153398 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8b4e79a6-ff8b-4293-931b-bde9f25b7576-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rhrsx\" (UID: \"8b4e79a6-ff8b-4293-931b-bde9f25b7576\") " pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153427 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99ae132d-a196-46f2-aae4-910e03935ee9-config\") pod \"kube-apiserver-operator-766d6c64bb-p5sq6\" (UID: \"99ae132d-a196-46f2-aae4-910e03935ee9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-p5sq6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153443 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d662cd8f-d996-413a-89c8-559898662622-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-9wx2k\" (UID: \"d662cd8f-d996-413a-89c8-559898662622\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9wx2k" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153460 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzkp8\" (UniqueName: \"kubernetes.io/projected/be1b64cc-b8d5-429c-8189-542268f1d7a2-kube-api-access-tzkp8\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153476 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/238d099f-eb44-4b83-a996-647f7adad7d1-webhook-cert\") pod \"packageserver-d55dfcdfc-cw84n\" (UID: \"238d099f-eb44-4b83-a996-647f7adad7d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153494 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccd74590-a19d-4c99-bf60-59f66e85a484-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-cssg6\" (UID: \"ccd74590-a19d-4c99-bf60-59f66e85a484\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cssg6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153535 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/82d719a5-f69c-4ffa-8bab-5c73841665ee-etcd-service-ca\") pod \"etcd-operator-b45778765-zg5c9\" (UID: \"82d719a5-f69c-4ffa-8bab-5c73841665ee\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153609 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153643 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a44c144e-22c9-44be-88bd-408c82ed0e0a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-z4ppj\" (UID: \"a44c144e-22c9-44be-88bd-408c82ed0e0a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-z4ppj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153666 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d662cd8f-d996-413a-89c8-559898662622-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-9wx2k\" (UID: \"d662cd8f-d996-413a-89c8-559898662622\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9wx2k" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153688 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/fde6fa25-f831-4858-96d5-c549d889c4c9-metrics-tls\") pod \"ingress-operator-5b745b69d9-zmvtv\" (UID: \"fde6fa25-f831-4858-96d5-c549d889c4c9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zmvtv" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153721 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df27a7ac-56ad-458a-8954-4177f65db5ac-serving-cert\") pod \"openshift-config-operator-7777fb866f-g2w8q\" (UID: \"df27a7ac-56ad-458a-8954-4177f65db5ac\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153738 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lpll\" (UniqueName: \"kubernetes.io/projected/69261199-17d1-4122-ad29-ef7417a0f25e-kube-api-access-6lpll\") pod \"olm-operator-6b444d44fb-z2ndc\" (UID: \"69261199-17d1-4122-ad29-ef7417a0f25e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153754 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fde6fa25-f831-4858-96d5-c549d889c4c9-bound-sa-token\") pod \"ingress-operator-5b745b69d9-zmvtv\" (UID: \"fde6fa25-f831-4858-96d5-c549d889c4c9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zmvtv" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153776 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5rrq\" (UniqueName: \"kubernetes.io/projected/238d099f-eb44-4b83-a996-647f7adad7d1-kube-api-access-t5rrq\") pod \"packageserver-d55dfcdfc-cw84n\" (UID: \"238d099f-eb44-4b83-a996-647f7adad7d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153802 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/a7e00bfd-844d-4264-aff6-d2bdb6673084-console-oauth-config\") pod \"console-f9d7485db-sk8zj\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153823 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pzpw\" (UniqueName: \"kubernetes.io/projected/a7e00bfd-844d-4264-aff6-d2bdb6673084-kube-api-access-4pzpw\") pod \"console-f9d7485db-sk8zj\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153866 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/99ae132d-a196-46f2-aae4-910e03935ee9-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-p5sq6\" (UID: \"99ae132d-a196-46f2-aae4-910e03935ee9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-p5sq6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153885 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a44c144e-22c9-44be-88bd-408c82ed0e0a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-z4ppj\" (UID: \"a44c144e-22c9-44be-88bd-408c82ed0e0a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-z4ppj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153901 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mftqn\" (UniqueName: \"kubernetes.io/projected/ce0cb782-5b0e-4dfd-8c39-8ad6d6787d58-kube-api-access-mftqn\") pod \"kube-storage-version-migrator-operator-b67b599dd-zw7wx\" (UID: \"ce0cb782-5b0e-4dfd-8c39-8ad6d6787d58\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw7wx" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153919 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zs5r\" (UniqueName: \"kubernetes.io/projected/9f985710-d7f9-4d47-bab6-12cea6e28ae9-kube-api-access-5zs5r\") pod \"multus-admission-controller-857f4d67dd-s6xwh\" (UID: \"9f985710-d7f9-4d47-bab6-12cea6e28ae9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-s6xwh" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153940 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/69261199-17d1-4122-ad29-ef7417a0f25e-srv-cert\") pod \"olm-operator-6b444d44fb-z2ndc\" (UID: \"69261199-17d1-4122-ad29-ef7417a0f25e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.153955 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/238d099f-eb44-4b83-a996-647f7adad7d1-tmpfs\") pod \"packageserver-d55dfcdfc-cw84n\" (UID: \"238d099f-eb44-4b83-a996-647f7adad7d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154014 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154033 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-trusted-ca-bundle\") pod \"console-f9d7485db-sk8zj\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154052 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mq9gk\" (UniqueName: \"kubernetes.io/projected/8b4e79a6-ff8b-4293-931b-bde9f25b7576-kube-api-access-mq9gk\") pod \"marketplace-operator-79b997595-rhrsx\" (UID: \"8b4e79a6-ff8b-4293-931b-bde9f25b7576\") " pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154081 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154101 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154121 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/82d719a5-f69c-4ffa-8bab-5c73841665ee-etcd-ca\") pod \"etcd-operator-b45778765-zg5c9\" (UID: \"82d719a5-f69c-4ffa-8bab-5c73841665ee\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154152 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpwvd\" (UniqueName: \"kubernetes.io/projected/341a7457-1e1c-4f8c-81b8-850161798640-kube-api-access-hpwvd\") pod \"cluster-samples-operator-665b6dd947-nrm2j\" (UID: \"341a7457-1e1c-4f8c-81b8-850161798640\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nrm2j" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154181 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mz88h\" (UniqueName: \"kubernetes.io/projected/9b990ad9-6046-4ef0-bf53-1a5a74c9d0d8-kube-api-access-mz88h\") pod \"downloads-7954f5f757-gxhxb\" (UID: \"9b990ad9-6046-4ef0-bf53-1a5a74c9d0d8\") " pod="openshift-console/downloads-7954f5f757-gxhxb" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154197 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce0cb782-5b0e-4dfd-8c39-8ad6d6787d58-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-zw7wx\" 
(UID: \"ce0cb782-5b0e-4dfd-8c39-8ad6d6787d58\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw7wx" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154213 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fde6fa25-f831-4858-96d5-c549d889c4c9-trusted-ca\") pod \"ingress-operator-5b745b69d9-zmvtv\" (UID: \"fde6fa25-f831-4858-96d5-c549d889c4c9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zmvtv" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154234 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4z2rd\" (UniqueName: \"kubernetes.io/projected/d662cd8f-d996-413a-89c8-559898662622-kube-api-access-4z2rd\") pod \"cluster-image-registry-operator-dc59b4c8b-9wx2k\" (UID: \"d662cd8f-d996-413a-89c8-559898662622\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9wx2k" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154276 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a7e00bfd-844d-4264-aff6-d2bdb6673084-console-serving-cert\") pod \"console-f9d7485db-sk8zj\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154293 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154311 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82d719a5-f69c-4ffa-8bab-5c73841665ee-serving-cert\") pod \"etcd-operator-b45778765-zg5c9\" (UID: \"82d719a5-f69c-4ffa-8bab-5c73841665ee\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154328 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154355 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/df27a7ac-56ad-458a-8954-4177f65db5ac-available-featuregates\") pod \"openshift-config-operator-7777fb866f-g2w8q\" (UID: \"df27a7ac-56ad-458a-8954-4177f65db5ac\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154372 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ce0cb782-5b0e-4dfd-8c39-8ad6d6787d58-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-zw7wx\" (UID: \"ce0cb782-5b0e-4dfd-8c39-8ad6d6787d58\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw7wx" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154393 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/238d099f-eb44-4b83-a996-647f7adad7d1-apiservice-cert\") pod \"packageserver-d55dfcdfc-cw84n\" (UID: \"238d099f-eb44-4b83-a996-647f7adad7d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154411 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qt28m\" (UniqueName: \"kubernetes.io/projected/fde6fa25-f831-4858-96d5-c549d889c4c9-kube-api-access-qt28m\") pod \"ingress-operator-5b745b69d9-zmvtv\" (UID: \"fde6fa25-f831-4858-96d5-c549d889c4c9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zmvtv" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154436 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-audit-policies\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154453 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154474 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99ae132d-a196-46f2-aae4-910e03935ee9-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-p5sq6\" (UID: \"99ae132d-a196-46f2-aae4-910e03935ee9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-p5sq6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154490 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154520 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-service-ca\") pod \"console-f9d7485db-sk8zj\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154538 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svz8b\" (UniqueName: \"kubernetes.io/projected/df27a7ac-56ad-458a-8954-4177f65db5ac-kube-api-access-svz8b\") pod \"openshift-config-operator-7777fb866f-g2w8q\" (UID: \"df27a7ac-56ad-458a-8954-4177f65db5ac\") " 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154560 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a44c144e-22c9-44be-88bd-408c82ed0e0a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-z4ppj\" (UID: \"a44c144e-22c9-44be-88bd-408c82ed0e0a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-z4ppj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154561 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-oauth-serving-cert\") pod \"console-f9d7485db-sk8zj\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154578 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8b4e79a6-ff8b-4293-931b-bde9f25b7576-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rhrsx\" (UID: \"8b4e79a6-ff8b-4293-931b-bde9f25b7576\") " pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154598 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/69261199-17d1-4122-ad29-ef7417a0f25e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-z2ndc\" (UID: \"69261199-17d1-4122-ad29-ef7417a0f25e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154619 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/341a7457-1e1c-4f8c-81b8-850161798640-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-nrm2j\" (UID: \"341a7457-1e1c-4f8c-81b8-850161798640\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nrm2j" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154636 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ccd74590-a19d-4c99-bf60-59f66e85a484-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-cssg6\" (UID: \"ccd74590-a19d-4c99-bf60-59f66e85a484\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cssg6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154639 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154653 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82d719a5-f69c-4ffa-8bab-5c73841665ee-config\") pod \"etcd-operator-b45778765-zg5c9\" (UID: \"82d719a5-f69c-4ffa-8bab-5c73841665ee\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154712 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/82d719a5-f69c-4ffa-8bab-5c73841665ee-etcd-client\") pod \"etcd-operator-b45778765-zg5c9\" (UID: \"82d719a5-f69c-4ffa-8bab-5c73841665ee\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154740 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2c8f\" (UniqueName: \"kubernetes.io/projected/82d719a5-f69c-4ffa-8bab-5c73841665ee-kube-api-access-k2c8f\") pod \"etcd-operator-b45778765-zg5c9\" (UID: \"82d719a5-f69c-4ffa-8bab-5c73841665ee\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154772 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9f985710-d7f9-4d47-bab6-12cea6e28ae9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-s6xwh\" (UID: \"9f985710-d7f9-4d47-bab6-12cea6e28ae9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-s6xwh" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154828 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.154871 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/be1b64cc-b8d5-429c-8189-542268f1d7a2-audit-dir\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.155521 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-trusted-ca-bundle\") pod \"console-f9d7485db-sk8zj\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.155665 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/df27a7ac-56ad-458a-8954-4177f65db5ac-available-featuregates\") pod \"openshift-config-operator-7777fb866f-g2w8q\" (UID: \"df27a7ac-56ad-458a-8954-4177f65db5ac\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.156224 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-audit-policies\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.156585 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"console-config\" (UniqueName: \"kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-console-config\") pod \"console-f9d7485db-sk8zj\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.156942 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99ae132d-a196-46f2-aae4-910e03935ee9-config\") pod \"kube-apiserver-operator-766d6c64bb-p5sq6\" (UID: \"99ae132d-a196-46f2-aae4-910e03935ee9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-p5sq6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.156960 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d662cd8f-d996-413a-89c8-559898662622-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-9wx2k\" (UID: \"d662cd8f-d996-413a-89c8-559898662622\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9wx2k" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.157687 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a44c144e-22c9-44be-88bd-408c82ed0e0a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-z4ppj\" (UID: \"a44c144e-22c9-44be-88bd-408c82ed0e0a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-z4ppj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.158028 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.157390 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccd74590-a19d-4c99-bf60-59f66e85a484-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-cssg6\" (UID: \"ccd74590-a19d-4c99-bf60-59f66e85a484\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cssg6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.159370 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-service-ca\") pod \"console-f9d7485db-sk8zj\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.159592 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.159931 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df27a7ac-56ad-458a-8954-4177f65db5ac-serving-cert\") pod \"openshift-config-operator-7777fb866f-g2w8q\" (UID: \"df27a7ac-56ad-458a-8954-4177f65db5ac\") " 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.160020 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/341a7457-1e1c-4f8c-81b8-850161798640-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-nrm2j\" (UID: \"341a7457-1e1c-4f8c-81b8-850161798640\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nrm2j" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.161206 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.161246 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a44c144e-22c9-44be-88bd-408c82ed0e0a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-z4ppj\" (UID: \"a44c144e-22c9-44be-88bd-408c82ed0e0a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-z4ppj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.161750 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/d662cd8f-d996-413a-89c8-559898662622-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-9wx2k\" (UID: \"d662cd8f-d996-413a-89c8-559898662622\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9wx2k" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.163401 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.164293 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a7e00bfd-844d-4264-aff6-d2bdb6673084-console-oauth-config\") pod \"console-f9d7485db-sk8zj\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.165143 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.165324 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a7e00bfd-844d-4264-aff6-d2bdb6673084-console-serving-cert\") pod \"console-f9d7485db-sk8zj\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.165469 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.166230 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ccd74590-a19d-4c99-bf60-59f66e85a484-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-cssg6\" (UID: \"ccd74590-a19d-4c99-bf60-59f66e85a484\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cssg6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.166605 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.167345 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99ae132d-a196-46f2-aae4-910e03935ee9-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-p5sq6\" (UID: \"99ae132d-a196-46f2-aae4-910e03935ee9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-p5sq6" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.168405 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.169294 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.173954 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.183258 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.202796 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.208947 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/ce0cb782-5b0e-4dfd-8c39-8ad6d6787d58-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-zw7wx\" (UID: \"ce0cb782-5b0e-4dfd-8c39-8ad6d6787d58\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw7wx" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.222397 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.229452 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce0cb782-5b0e-4dfd-8c39-8ad6d6787d58-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-zw7wx\" (UID: \"ce0cb782-5b0e-4dfd-8c39-8ad6d6787d58\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw7wx" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.242888 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.255237 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fde6fa25-f831-4858-96d5-c549d889c4c9-trusted-ca\") pod \"ingress-operator-5b745b69d9-zmvtv\" (UID: \"fde6fa25-f831-4858-96d5-c549d889c4c9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zmvtv" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.255459 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82d719a5-f69c-4ffa-8bab-5c73841665ee-serving-cert\") pod \"etcd-operator-b45778765-zg5c9\" (UID: \"82d719a5-f69c-4ffa-8bab-5c73841665ee\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.255597 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/238d099f-eb44-4b83-a996-647f7adad7d1-apiservice-cert\") pod \"packageserver-d55dfcdfc-cw84n\" (UID: \"238d099f-eb44-4b83-a996-647f7adad7d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.255704 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qt28m\" (UniqueName: \"kubernetes.io/projected/fde6fa25-f831-4858-96d5-c549d889c4c9-kube-api-access-qt28m\") pod \"ingress-operator-5b745b69d9-zmvtv\" (UID: \"fde6fa25-f831-4858-96d5-c549d889c4c9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zmvtv" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.255842 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/69261199-17d1-4122-ad29-ef7417a0f25e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-z2ndc\" (UID: \"69261199-17d1-4122-ad29-ef7417a0f25e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.255923 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8b4e79a6-ff8b-4293-931b-bde9f25b7576-marketplace-operator-metrics\") pod 
\"marketplace-operator-79b997595-rhrsx\" (UID: \"8b4e79a6-ff8b-4293-931b-bde9f25b7576\") " pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.256003 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82d719a5-f69c-4ffa-8bab-5c73841665ee-config\") pod \"etcd-operator-b45778765-zg5c9\" (UID: \"82d719a5-f69c-4ffa-8bab-5c73841665ee\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.256072 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/82d719a5-f69c-4ffa-8bab-5c73841665ee-etcd-client\") pod \"etcd-operator-b45778765-zg5c9\" (UID: \"82d719a5-f69c-4ffa-8bab-5c73841665ee\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.256159 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2c8f\" (UniqueName: \"kubernetes.io/projected/82d719a5-f69c-4ffa-8bab-5c73841665ee-kube-api-access-k2c8f\") pod \"etcd-operator-b45778765-zg5c9\" (UID: \"82d719a5-f69c-4ffa-8bab-5c73841665ee\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.256246 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9f985710-d7f9-4d47-bab6-12cea6e28ae9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-s6xwh\" (UID: \"9f985710-d7f9-4d47-bab6-12cea6e28ae9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-s6xwh" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.256358 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8b4e79a6-ff8b-4293-931b-bde9f25b7576-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rhrsx\" (UID: \"8b4e79a6-ff8b-4293-931b-bde9f25b7576\") " pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.256438 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/238d099f-eb44-4b83-a996-647f7adad7d1-webhook-cert\") pod \"packageserver-d55dfcdfc-cw84n\" (UID: \"238d099f-eb44-4b83-a996-647f7adad7d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.256566 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/82d719a5-f69c-4ffa-8bab-5c73841665ee-etcd-service-ca\") pod \"etcd-operator-b45778765-zg5c9\" (UID: \"82d719a5-f69c-4ffa-8bab-5c73841665ee\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.256675 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/fde6fa25-f831-4858-96d5-c549d889c4c9-metrics-tls\") pod \"ingress-operator-5b745b69d9-zmvtv\" (UID: \"fde6fa25-f831-4858-96d5-c549d889c4c9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zmvtv" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.256746 4711 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-6lpll\" (UniqueName: \"kubernetes.io/projected/69261199-17d1-4122-ad29-ef7417a0f25e-kube-api-access-6lpll\") pod \"olm-operator-6b444d44fb-z2ndc\" (UID: \"69261199-17d1-4122-ad29-ef7417a0f25e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.256828 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fde6fa25-f831-4858-96d5-c549d889c4c9-bound-sa-token\") pod \"ingress-operator-5b745b69d9-zmvtv\" (UID: \"fde6fa25-f831-4858-96d5-c549d889c4c9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zmvtv" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.256908 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5rrq\" (UniqueName: \"kubernetes.io/projected/238d099f-eb44-4b83-a996-647f7adad7d1-kube-api-access-t5rrq\") pod \"packageserver-d55dfcdfc-cw84n\" (UID: \"238d099f-eb44-4b83-a996-647f7adad7d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.256998 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zs5r\" (UniqueName: \"kubernetes.io/projected/9f985710-d7f9-4d47-bab6-12cea6e28ae9-kube-api-access-5zs5r\") pod \"multus-admission-controller-857f4d67dd-s6xwh\" (UID: \"9f985710-d7f9-4d47-bab6-12cea6e28ae9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-s6xwh" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.257084 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/69261199-17d1-4122-ad29-ef7417a0f25e-srv-cert\") pod \"olm-operator-6b444d44fb-z2ndc\" (UID: \"69261199-17d1-4122-ad29-ef7417a0f25e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.257156 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/238d099f-eb44-4b83-a996-647f7adad7d1-tmpfs\") pod \"packageserver-d55dfcdfc-cw84n\" (UID: \"238d099f-eb44-4b83-a996-647f7adad7d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.257242 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mq9gk\" (UniqueName: \"kubernetes.io/projected/8b4e79a6-ff8b-4293-931b-bde9f25b7576-kube-api-access-mq9gk\") pod \"marketplace-operator-79b997595-rhrsx\" (UID: \"8b4e79a6-ff8b-4293-931b-bde9f25b7576\") " pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.257313 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/82d719a5-f69c-4ffa-8bab-5c73841665ee-etcd-ca\") pod \"etcd-operator-b45778765-zg5c9\" (UID: \"82d719a5-f69c-4ffa-8bab-5c73841665ee\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.257549 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/238d099f-eb44-4b83-a996-647f7adad7d1-tmpfs\") pod \"packageserver-d55dfcdfc-cw84n\" (UID: 
\"238d099f-eb44-4b83-a996-647f7adad7d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.296397 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.313072 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.329938 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.343056 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.362630 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.382412 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.402479 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.422336 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.442850 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.463189 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.483420 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.503658 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.523444 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.542978 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.561971 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.583591 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.604320 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.611321 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/fde6fa25-f831-4858-96d5-c549d889c4c9-metrics-tls\") pod \"ingress-operator-5b745b69d9-zmvtv\" (UID: \"fde6fa25-f831-4858-96d5-c549d889c4c9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zmvtv" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.623468 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.651061 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.656741 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fde6fa25-f831-4858-96d5-c549d889c4c9-trusted-ca\") pod \"ingress-operator-5b745b69d9-zmvtv\" (UID: \"fde6fa25-f831-4858-96d5-c549d889c4c9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zmvtv" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.662925 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.683150 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.702494 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.722036 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.742926 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.762013 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.783154 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.802857 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.823264 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.842325 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.862756 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.882734 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.905609 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.909144 4711 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82d719a5-f69c-4ffa-8bab-5c73841665ee-serving-cert\") pod \"etcd-operator-b45778765-zg5c9\" (UID: \"82d719a5-f69c-4ffa-8bab-5c73841665ee\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.922805 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.929817 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/82d719a5-f69c-4ffa-8bab-5c73841665ee-etcd-client\") pod \"etcd-operator-b45778765-zg5c9\" (UID: \"82d719a5-f69c-4ffa-8bab-5c73841665ee\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.942572 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.948369 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/238d099f-eb44-4b83-a996-647f7adad7d1-apiservice-cert\") pod \"packageserver-d55dfcdfc-cw84n\" (UID: \"238d099f-eb44-4b83-a996-647f7adad7d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.950451 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/238d099f-eb44-4b83-a996-647f7adad7d1-webhook-cert\") pod \"packageserver-d55dfcdfc-cw84n\" (UID: \"238d099f-eb44-4b83-a996-647f7adad7d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.962906 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 23 08:22:46 crc kubenswrapper[4711]: I0123 08:22:46.982837 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.004051 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.023678 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.040767 4711 request.go:700] Waited for 1.012048198s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/secrets?fieldSelector=metadata.name%3Dpprof-cert&limit=500&resourceVersion=0 Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.042660 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.050072 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/69261199-17d1-4122-ad29-ef7417a0f25e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-z2ndc\" (UID: 
\"69261199-17d1-4122-ad29-ef7417a0f25e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.063315 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.068396 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82d719a5-f69c-4ffa-8bab-5c73841665ee-config\") pod \"etcd-operator-b45778765-zg5c9\" (UID: \"82d719a5-f69c-4ffa-8bab-5c73841665ee\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.083559 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.088551 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/82d719a5-f69c-4ffa-8bab-5c73841665ee-etcd-ca\") pod \"etcd-operator-b45778765-zg5c9\" (UID: \"82d719a5-f69c-4ffa-8bab-5c73841665ee\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.103889 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.107362 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/82d719a5-f69c-4ffa-8bab-5c73841665ee-etcd-service-ca\") pod \"etcd-operator-b45778765-zg5c9\" (UID: \"82d719a5-f69c-4ffa-8bab-5c73841665ee\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.123268 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.143704 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.163196 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.183182 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.204352 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.222720 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.229905 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8b4e79a6-ff8b-4293-931b-bde9f25b7576-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rhrsx\" (UID: \"8b4e79a6-ff8b-4293-931b-bde9f25b7576\") " pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.250263 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 23 
08:22:47 crc kubenswrapper[4711]: E0123 08:22:47.256656 4711 secret.go:188] Couldn't get secret openshift-multus/multus-admission-controller-secret: failed to sync secret cache: timed out waiting for the condition Jan 23 08:22:47 crc kubenswrapper[4711]: E0123 08:22:47.256874 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9f985710-d7f9-4d47-bab6-12cea6e28ae9-webhook-certs podName:9f985710-d7f9-4d47-bab6-12cea6e28ae9 nodeName:}" failed. No retries permitted until 2026-01-23 08:22:47.756831002 +0000 UTC m=+153.329787370 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/9f985710-d7f9-4d47-bab6-12cea6e28ae9-webhook-certs") pod "multus-admission-controller-857f4d67dd-s6xwh" (UID: "9f985710-d7f9-4d47-bab6-12cea6e28ae9") : failed to sync secret cache: timed out waiting for the condition Jan 23 08:22:47 crc kubenswrapper[4711]: E0123 08:22:47.257381 4711 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/olm-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Jan 23 08:22:47 crc kubenswrapper[4711]: E0123 08:22:47.257537 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/69261199-17d1-4122-ad29-ef7417a0f25e-srv-cert podName:69261199-17d1-4122-ad29-ef7417a0f25e nodeName:}" failed. No retries permitted until 2026-01-23 08:22:47.757491448 +0000 UTC m=+153.330447886 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/69261199-17d1-4122-ad29-ef7417a0f25e-srv-cert") pod "olm-operator-6b444d44fb-z2ndc" (UID: "69261199-17d1-4122-ad29-ef7417a0f25e") : failed to sync secret cache: timed out waiting for the condition Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.258811 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8b4e79a6-ff8b-4293-931b-bde9f25b7576-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rhrsx\" (UID: \"8b4e79a6-ff8b-4293-931b-bde9f25b7576\") " pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.262938 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.282829 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.303463 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.323444 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.371268 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2c59f\" (UniqueName: \"kubernetes.io/projected/9068fcb6-d02f-4175-97f8-34d5ea1389fc-kube-api-access-2c59f\") pod \"console-operator-58897d9998-f76vr\" (UID: \"9068fcb6-d02f-4175-97f8-34d5ea1389fc\") " pod="openshift-console-operator/console-operator-58897d9998-f76vr" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.378458 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qm5t\" 
(UniqueName: \"kubernetes.io/projected/be88d29a-82f0-448c-bad5-545df560740b-kube-api-access-9qm5t\") pod \"apiserver-7bbb656c7d-ppc4l\" (UID: \"be88d29a-82f0-448c-bad5-545df560740b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.409196 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4th8k\" (UniqueName: \"kubernetes.io/projected/9afbdb97-d93a-494f-8ad0-23179afbee6d-kube-api-access-4th8k\") pod \"machine-api-operator-5694c8668f-fzkjs\" (UID: \"9afbdb97-d93a-494f-8ad0-23179afbee6d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fzkjs" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.420083 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9bzb\" (UniqueName: \"kubernetes.io/projected/3578bb73-066e-43b9-85db-a5989823d8d1-kube-api-access-j9bzb\") pod \"apiserver-76f77b778f-7nvn6\" (UID: \"3578bb73-066e-43b9-85db-a5989823d8d1\") " pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.436110 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwz5w\" (UniqueName: \"kubernetes.io/projected/72195d0a-6291-4d31-be15-10b066538f0e-kube-api-access-kwz5w\") pod \"authentication-operator-69f744f599-6rpqs\" (UID: \"72195d0a-6291-4d31-be15-10b066538f0e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6rpqs" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.459656 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgf6g\" (UniqueName: \"kubernetes.io/projected/b328000b-5587-4645-a3b6-02397de51cf6-kube-api-access-sgf6g\") pod \"controller-manager-879f6c89f-vsck7\" (UID: \"b328000b-5587-4645-a3b6-02397de51cf6\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.480478 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gprkq\" (UniqueName: \"kubernetes.io/projected/d7905e1f-a5a4-449c-b393-1b8ee89493d6-kube-api-access-gprkq\") pod \"openshift-apiserver-operator-796bbdcf4f-txv7j\" (UID: \"d7905e1f-a5a4-449c-b393-1b8ee89493d6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-txv7j" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.487550 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.497690 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-txv7j" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.504162 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kpfq\" (UniqueName: \"kubernetes.io/projected/86145ace-e2d3-4b5b-9475-f52b19faa9df-kube-api-access-9kpfq\") pod \"route-controller-manager-6576b87f9c-kqstp\" (UID: \"86145ace-e2d3-4b5b-9475-f52b19faa9df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.512868 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-f76vr" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.519629 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4lx5\" (UniqueName: \"kubernetes.io/projected/d4183796-cb3c-4979-8336-9e15ba18f37a-kube-api-access-x4lx5\") pod \"machine-approver-56656f9798-tjf8f\" (UID: \"d4183796-cb3c-4979-8336-9e15ba18f37a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tjf8f" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.522733 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.533365 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-6rpqs" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.542858 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.564060 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.582886 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.603855 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.628243 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.643460 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.662947 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.664258 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.690871 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.702965 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.705391 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-fzkjs" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.735297 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.744168 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.746839 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.762853 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.763530 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.773918 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tjf8f" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.780045 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9f985710-d7f9-4d47-bab6-12cea6e28ae9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-s6xwh\" (UID: \"9f985710-d7f9-4d47-bab6-12cea6e28ae9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-s6xwh" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.780162 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/69261199-17d1-4122-ad29-ef7417a0f25e-srv-cert\") pod \"olm-operator-6b444d44fb-z2ndc\" (UID: \"69261199-17d1-4122-ad29-ef7417a0f25e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.783081 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.786698 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9f985710-d7f9-4d47-bab6-12cea6e28ae9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-s6xwh\" (UID: \"9f985710-d7f9-4d47-bab6-12cea6e28ae9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-s6xwh" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.802841 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.827264 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.854180 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.874804 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/69261199-17d1-4122-ad29-ef7417a0f25e-srv-cert\") pod \"olm-operator-6b444d44fb-z2ndc\" (UID: \"69261199-17d1-4122-ad29-ef7417a0f25e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.883066 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.912238 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.925982 4711 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.928112 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l"] Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.946029 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.962340 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.972582 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-f76vr"] Jan 23 08:22:47 crc kubenswrapper[4711]: I0123 08:22:47.985969 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 23 08:22:48 crc kubenswrapper[4711]: W0123 08:22:48.004058 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9068fcb6_d02f_4175_97f8_34d5ea1389fc.slice/crio-91da0033cc420c4d65c6f5d2d3934968f90b6edc44fcaa3740ab8d2db942a694 WatchSource:0}: Error finding container 91da0033cc420c4d65c6f5d2d3934968f90b6edc44fcaa3740ab8d2db942a694: Status 404 returned error can't find the container with id 91da0033cc420c4d65c6f5d2d3934968f90b6edc44fcaa3740ab8d2db942a694 Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.005862 4711 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.011998 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-txv7j"] Jan 23 08:22:48 crc kubenswrapper[4711]: W0123 08:22:48.023537 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd7905e1f_a5a4_449c_b393_1b8ee89493d6.slice/crio-bb1206055f8e083c12c4ab839813326c240a57a65d41d21f1ba2be9d9ae3b5dc WatchSource:0}: Error finding container bb1206055f8e083c12c4ab839813326c240a57a65d41d21f1ba2be9d9ae3b5dc: Status 404 returned error can't find the container with id bb1206055f8e083c12c4ab839813326c240a57a65d41d21f1ba2be9d9ae3b5dc Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.026038 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.045965 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.061127 4711 request.go:700] Waited for 1.921459611s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dnode-bootstrapper-token&limit=500&resourceVersion=0 Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.063178 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.083541 4711 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.106428 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp"] Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.123742 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a44c144e-22c9-44be-88bd-408c82ed0e0a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-z4ppj\" (UID: \"a44c144e-22c9-44be-88bd-408c82ed0e0a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-z4ppj" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.136749 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vsck7"] Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.137334 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d662cd8f-d996-413a-89c8-559898662622-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-9wx2k\" (UID: \"d662cd8f-d996-413a-89c8-559898662622\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9wx2k" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.159461 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzkp8\" (UniqueName: \"kubernetes.io/projected/be1b64cc-b8d5-429c-8189-542268f1d7a2-kube-api-access-tzkp8\") pod \"oauth-openshift-558db77b4-q5hjq\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.181460 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pzpw\" (UniqueName: \"kubernetes.io/projected/a7e00bfd-844d-4264-aff6-d2bdb6673084-kube-api-access-4pzpw\") pod \"console-f9d7485db-sk8zj\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.192014 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.199966 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/99ae132d-a196-46f2-aae4-910e03935ee9-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-p5sq6\" (UID: \"99ae132d-a196-46f2-aae4-910e03935ee9\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-p5sq6" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.208020 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.233093 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-7nvn6"] Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.234184 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-p5sq6" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.241049 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-z4ppj" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.243070 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mz88h\" (UniqueName: \"kubernetes.io/projected/9b990ad9-6046-4ef0-bf53-1a5a74c9d0d8-kube-api-access-mz88h\") pod \"downloads-7954f5f757-gxhxb\" (UID: \"9b990ad9-6046-4ef0-bf53-1a5a74c9d0d8\") " pod="openshift-console/downloads-7954f5f757-gxhxb" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.254928 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wm622\" (UniqueName: \"kubernetes.io/projected/ccd74590-a19d-4c99-bf60-59f66e85a484-kube-api-access-wm622\") pod \"openshift-controller-manager-operator-756b6f6bc6-cssg6\" (UID: \"ccd74590-a19d-4c99-bf60-59f66e85a484\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cssg6" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.260898 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpwvd\" (UniqueName: \"kubernetes.io/projected/341a7457-1e1c-4f8c-81b8-850161798640-kube-api-access-hpwvd\") pod \"cluster-samples-operator-665b6dd947-nrm2j\" (UID: \"341a7457-1e1c-4f8c-81b8-850161798640\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nrm2j" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.271500 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fzkjs"] Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.281164 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-6rpqs"] Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.288255 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mftqn\" (UniqueName: \"kubernetes.io/projected/ce0cb782-5b0e-4dfd-8c39-8ad6d6787d58-kube-api-access-mftqn\") pod \"kube-storage-version-migrator-operator-b67b599dd-zw7wx\" (UID: \"ce0cb782-5b0e-4dfd-8c39-8ad6d6787d58\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw7wx" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.300890 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4z2rd\" (UniqueName: \"kubernetes.io/projected/d662cd8f-d996-413a-89c8-559898662622-kube-api-access-4z2rd\") pod \"cluster-image-registry-operator-dc59b4c8b-9wx2k\" (UID: \"d662cd8f-d996-413a-89c8-559898662622\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9wx2k" Jan 23 08:22:48 crc kubenswrapper[4711]: W0123 08:22:48.305938 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9afbdb97_d93a_494f_8ad0_23179afbee6d.slice/crio-cf634cecbab108944382a521036a58fbba4ca4e61724726b74ec09bcecb541fe WatchSource:0}: Error finding container cf634cecbab108944382a521036a58fbba4ca4e61724726b74ec09bcecb541fe: Status 404 returned error can't find the container with id cf634cecbab108944382a521036a58fbba4ca4e61724726b74ec09bcecb541fe Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.322823 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svz8b\" (UniqueName: 
\"kubernetes.io/projected/df27a7ac-56ad-458a-8954-4177f65db5ac-kube-api-access-svz8b\") pod \"openshift-config-operator-7777fb866f-g2w8q\" (UID: \"df27a7ac-56ad-458a-8954-4177f65db5ac\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.338077 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qt28m\" (UniqueName: \"kubernetes.io/projected/fde6fa25-f831-4858-96d5-c549d889c4c9-kube-api-access-qt28m\") pod \"ingress-operator-5b745b69d9-zmvtv\" (UID: \"fde6fa25-f831-4858-96d5-c549d889c4c9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zmvtv" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.386417 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2c8f\" (UniqueName: \"kubernetes.io/projected/82d719a5-f69c-4ffa-8bab-5c73841665ee-kube-api-access-k2c8f\") pod \"etcd-operator-b45778765-zg5c9\" (UID: \"82d719a5-f69c-4ffa-8bab-5c73841665ee\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.394094 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lpll\" (UniqueName: \"kubernetes.io/projected/69261199-17d1-4122-ad29-ef7417a0f25e-kube-api-access-6lpll\") pod \"olm-operator-6b444d44fb-z2ndc\" (UID: \"69261199-17d1-4122-ad29-ef7417a0f25e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.413555 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fde6fa25-f831-4858-96d5-c549d889c4c9-bound-sa-token\") pod \"ingress-operator-5b745b69d9-zmvtv\" (UID: \"fde6fa25-f831-4858-96d5-c549d889c4c9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zmvtv" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.428343 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-f76vr" event={"ID":"9068fcb6-d02f-4175-97f8-34d5ea1389fc","Type":"ContainerStarted","Data":"2e0e98d4a1ed8d4ccf06cf29775ffda40b0ecd03069bdec9e167517d59dc4555"} Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.428402 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-f76vr" event={"ID":"9068fcb6-d02f-4175-97f8-34d5ea1389fc","Type":"ContainerStarted","Data":"91da0033cc420c4d65c6f5d2d3934968f90b6edc44fcaa3740ab8d2db942a694"} Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.429305 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-f76vr" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.429848 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-6rpqs" event={"ID":"72195d0a-6291-4d31-be15-10b066538f0e","Type":"ContainerStarted","Data":"93a1b5fce59381911ee78b7f71fd5cd578db0a78dbef19564230b52a1d30f12f"} Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.430375 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp" event={"ID":"86145ace-e2d3-4b5b-9475-f52b19faa9df","Type":"ContainerStarted","Data":"8e718114153b12f96e59f9f472054b60b61e2b732324fe090bc870ee8f6041a5"} Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 
08:22:48.430960 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" event={"ID":"3578bb73-066e-43b9-85db-a5989823d8d1","Type":"ContainerStarted","Data":"3aa56a6441f811b0b50ab1590a67af0f6c5128cc2185af535f2f5d78e68face8"} Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.431651 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tjf8f" event={"ID":"d4183796-cb3c-4979-8336-9e15ba18f37a","Type":"ContainerStarted","Data":"1edf342e4ef32bfa2f4f2dbb67e27bc06014953d69bc4fc9513e20d7fe69075d"} Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.431681 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tjf8f" event={"ID":"d4183796-cb3c-4979-8336-9e15ba18f37a","Type":"ContainerStarted","Data":"09f4833672257204118d250b309a7a36604664827f9ae95a285e4834a000ee10"} Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.432208 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7" event={"ID":"b328000b-5587-4645-a3b6-02397de51cf6","Type":"ContainerStarted","Data":"9f5567ad5d4c80341566c3cbf9b6edff8fe4b7580ddeb2dbc0e6619d6072bd14"} Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.433111 4711 generic.go:334] "Generic (PLEG): container finished" podID="be88d29a-82f0-448c-bad5-545df560740b" containerID="ed18898027c4721487413d72a1e1a14f93b5da5474b8ef32584560a2ae387c9e" exitCode=0 Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.433154 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l" event={"ID":"be88d29a-82f0-448c-bad5-545df560740b","Type":"ContainerDied","Data":"ed18898027c4721487413d72a1e1a14f93b5da5474b8ef32584560a2ae387c9e"} Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.433170 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l" event={"ID":"be88d29a-82f0-448c-bad5-545df560740b","Type":"ContainerStarted","Data":"4c88851bfa8a33a325787a91c4b76bc88c9de8ad9550399658944277d2bde920"} Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.440205 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5rrq\" (UniqueName: \"kubernetes.io/projected/238d099f-eb44-4b83-a996-647f7adad7d1-kube-api-access-t5rrq\") pod \"packageserver-d55dfcdfc-cw84n\" (UID: \"238d099f-eb44-4b83-a996-647f7adad7d1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.440467 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nrm2j" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.444282 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zs5r\" (UniqueName: \"kubernetes.io/projected/9f985710-d7f9-4d47-bab6-12cea6e28ae9-kube-api-access-5zs5r\") pod \"multus-admission-controller-857f4d67dd-s6xwh\" (UID: \"9f985710-d7f9-4d47-bab6-12cea6e28ae9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-s6xwh" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.448953 4711 patch_prober.go:28] interesting pod/console-operator-58897d9998-f76vr container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/readyz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.449029 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-f76vr" podUID="9068fcb6-d02f-4175-97f8-34d5ea1389fc" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.11:8443/readyz\": dial tcp 10.217.0.11:8443: connect: connection refused" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.449048 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fzkjs" event={"ID":"9afbdb97-d93a-494f-8ad0-23179afbee6d","Type":"ContainerStarted","Data":"cf634cecbab108944382a521036a58fbba4ca4e61724726b74ec09bcecb541fe"} Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.449586 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-gxhxb" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.457217 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mq9gk\" (UniqueName: \"kubernetes.io/projected/8b4e79a6-ff8b-4293-931b-bde9f25b7576-kube-api-access-mq9gk\") pod \"marketplace-operator-79b997595-rhrsx\" (UID: \"8b4e79a6-ff8b-4293-931b-bde9f25b7576\") " pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.473240 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-q5hjq"] Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.485537 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-txv7j" event={"ID":"d7905e1f-a5a4-449c-b393-1b8ee89493d6","Type":"ContainerStarted","Data":"cb0948cb952c9952b5d08fa0d802868ad7951adc2c0291f8e278cc1c3c501922"} Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.485589 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-txv7j" event={"ID":"d7905e1f-a5a4-449c-b393-1b8ee89493d6","Type":"ContainerStarted","Data":"bb1206055f8e083c12c4ab839813326c240a57a65d41d21f1ba2be9d9ae3b5dc"} Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.489424 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e1ab258d-ba2f-434d-8a15-2a08f24d03cb-config-volume\") pod \"collect-profiles-29485935-856kh\" (UID: \"e1ab258d-ba2f-434d-8a15-2a08f24d03cb\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.489467 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0915c58a-b29f-4de3-b5a0-e9985a2fd699-config\") pod \"service-ca-operator-777779d784-gtl2r\" (UID: \"0915c58a-b29f-4de3-b5a0-e9985a2fd699\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gtl2r" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.489541 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bb9eeabb-23bb-45db-bcbb-aae7c165f260-trusted-ca\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.489566 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f34584c2-05c1-4311-9168-fb7fb86976d9-auth-proxy-config\") pod \"machine-config-operator-74547568cd-mrmw6\" (UID: \"f34584c2-05c1-4311-9168-fb7fb86976d9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mrmw6" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.489624 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/88908809-4dd9-4e62-9c5b-1bf8b3cfdaed-metrics-certs\") pod \"router-default-5444994796-v2m5p\" (UID: \"88908809-4dd9-4e62-9c5b-1bf8b3cfdaed\") " pod="openshift-ingress/router-default-5444994796-v2m5p" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.489644 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/96dd98a3-fa00-4d1f-ba55-ab03180cff4c-cert\") pod \"ingress-canary-wl6fc\" (UID: \"96dd98a3-fa00-4d1f-ba55-ab03180cff4c\") " pod="openshift-ingress-canary/ingress-canary-wl6fc" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.489725 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wqht\" (UniqueName: \"kubernetes.io/projected/ed15b7dc-3a50-412e-a239-ed37611684a8-kube-api-access-6wqht\") pod \"catalog-operator-68c6474976-z87tl\" (UID: \"ed15b7dc-3a50-412e-a239-ed37611684a8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-z87tl" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.489762 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bb9eeabb-23bb-45db-bcbb-aae7c165f260-registry-certificates\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.489788 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fklbf\" (UniqueName: \"kubernetes.io/projected/fd4bd624-70bb-4602-a4f3-6824c59f90a4-kube-api-access-fklbf\") pod \"control-plane-machine-set-operator-78cbb6b69f-5mwhk\" (UID: \"fd4bd624-70bb-4602-a4f3-6824c59f90a4\") " 
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5mwhk" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.489820 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/fd4bd624-70bb-4602-a4f3-6824c59f90a4-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5mwhk\" (UID: \"fd4bd624-70bb-4602-a4f3-6824c59f90a4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5mwhk" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.489873 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c17e743d-d33d-45c8-a15b-a6759f00838d-config\") pod \"kube-controller-manager-operator-78b949d7b-xcw55\" (UID: \"c17e743d-d33d-45c8-a15b-a6759f00838d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xcw55" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.489924 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ed15b7dc-3a50-412e-a239-ed37611684a8-srv-cert\") pod \"catalog-operator-68c6474976-z87tl\" (UID: \"ed15b7dc-3a50-412e-a239-ed37611684a8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-z87tl" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.489948 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88908809-4dd9-4e62-9c5b-1bf8b3cfdaed-service-ca-bundle\") pod \"router-default-5444994796-v2m5p\" (UID: \"88908809-4dd9-4e62-9c5b-1bf8b3cfdaed\") " pod="openshift-ingress/router-default-5444994796-v2m5p" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.490011 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.490050 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0915c58a-b29f-4de3-b5a0-e9985a2fd699-serving-cert\") pod \"service-ca-operator-777779d784-gtl2r\" (UID: \"0915c58a-b29f-4de3-b5a0-e9985a2fd699\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gtl2r" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.490074 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9v6h\" (UniqueName: \"kubernetes.io/projected/8d219b79-0bde-4bac-ae6e-2e39a766d0e0-kube-api-access-s9v6h\") pod \"dns-operator-744455d44c-hn45s\" (UID: \"8d219b79-0bde-4bac-ae6e-2e39a766d0e0\") " pod="openshift-dns-operator/dns-operator-744455d44c-hn45s" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.490112 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/88908809-4dd9-4e62-9c5b-1bf8b3cfdaed-default-certificate\") pod 
\"router-default-5444994796-v2m5p\" (UID: \"88908809-4dd9-4e62-9c5b-1bf8b3cfdaed\") " pod="openshift-ingress/router-default-5444994796-v2m5p" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.490132 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8d219b79-0bde-4bac-ae6e-2e39a766d0e0-metrics-tls\") pod \"dns-operator-744455d44c-hn45s\" (UID: \"8d219b79-0bde-4bac-ae6e-2e39a766d0e0\") " pod="openshift-dns-operator/dns-operator-744455d44c-hn45s" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.490155 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ed15b7dc-3a50-412e-a239-ed37611684a8-profile-collector-cert\") pod \"catalog-operator-68c6474976-z87tl\" (UID: \"ed15b7dc-3a50-412e-a239-ed37611684a8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-z87tl" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.490192 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/88908809-4dd9-4e62-9c5b-1bf8b3cfdaed-stats-auth\") pod \"router-default-5444994796-v2m5p\" (UID: \"88908809-4dd9-4e62-9c5b-1bf8b3cfdaed\") " pod="openshift-ingress/router-default-5444994796-v2m5p" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.490254 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/0c1a3ec4-2523-4339-a252-99b7d88c8c93-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-nkrcc\" (UID: \"0c1a3ec4-2523-4339-a252-99b7d88c8c93\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkrcc" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.490281 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtndh\" (UniqueName: \"kubernetes.io/projected/0c1a3ec4-2523-4339-a252-99b7d88c8c93-kube-api-access-dtndh\") pod \"package-server-manager-789f6589d5-nkrcc\" (UID: \"0c1a3ec4-2523-4339-a252-99b7d88c8c93\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkrcc" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.490395 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tnld\" (UniqueName: \"kubernetes.io/projected/0915c58a-b29f-4de3-b5a0-e9985a2fd699-kube-api-access-7tnld\") pod \"service-ca-operator-777779d784-gtl2r\" (UID: \"0915c58a-b29f-4de3-b5a0-e9985a2fd699\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gtl2r" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.490445 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f34584c2-05c1-4311-9168-fb7fb86976d9-proxy-tls\") pod \"machine-config-operator-74547568cd-mrmw6\" (UID: \"f34584c2-05c1-4311-9168-fb7fb86976d9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mrmw6" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.490471 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: 
\"kubernetes.io/configmap/7c9cf2f9-5ba9-4670-9240-25b3d24104c4-signing-cabundle\") pod \"service-ca-9c57cc56f-7bjqm\" (UID: \"7c9cf2f9-5ba9-4670-9240-25b3d24104c4\") " pod="openshift-service-ca/service-ca-9c57cc56f-7bjqm" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.490527 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bb9eeabb-23bb-45db-bcbb-aae7c165f260-installation-pull-secrets\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.490553 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdmq6\" (UniqueName: \"kubernetes.io/projected/3537ecbf-d37c-458e-b460-8f72882bfd08-kube-api-access-xdmq6\") pod \"migrator-59844c95c7-nb2x5\" (UID: \"3537ecbf-d37c-458e-b460-8f72882bfd08\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-nb2x5" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.490574 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c17e743d-d33d-45c8-a15b-a6759f00838d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-xcw55\" (UID: \"c17e743d-d33d-45c8-a15b-a6759f00838d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xcw55" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.490608 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bb9eeabb-23bb-45db-bcbb-aae7c165f260-registry-tls\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.490666 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e1ab258d-ba2f-434d-8a15-2a08f24d03cb-secret-volume\") pod \"collect-profiles-29485935-856kh\" (UID: \"e1ab258d-ba2f-434d-8a15-2a08f24d03cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.490928 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cv5gl\" (UniqueName: \"kubernetes.io/projected/f34584c2-05c1-4311-9168-fb7fb86976d9-kube-api-access-cv5gl\") pod \"machine-config-operator-74547568cd-mrmw6\" (UID: \"f34584c2-05c1-4311-9168-fb7fb86976d9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mrmw6" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.490972 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwk4g\" (UniqueName: \"kubernetes.io/projected/e1ab258d-ba2f-434d-8a15-2a08f24d03cb-kube-api-access-bwk4g\") pod \"collect-profiles-29485935-856kh\" (UID: \"e1ab258d-ba2f-434d-8a15-2a08f24d03cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.491010 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c17e743d-d33d-45c8-a15b-a6759f00838d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-xcw55\" (UID: \"c17e743d-d33d-45c8-a15b-a6759f00838d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xcw55" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.491031 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bb9eeabb-23bb-45db-bcbb-aae7c165f260-bound-sa-token\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.491053 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bb9eeabb-23bb-45db-bcbb-aae7c165f260-ca-trust-extracted\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.491073 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwbjg\" (UniqueName: \"kubernetes.io/projected/96dd98a3-fa00-4d1f-ba55-ab03180cff4c-kube-api-access-bwbjg\") pod \"ingress-canary-wl6fc\" (UID: \"96dd98a3-fa00-4d1f-ba55-ab03180cff4c\") " pod="openshift-ingress-canary/ingress-canary-wl6fc" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.491112 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f34584c2-05c1-4311-9168-fb7fb86976d9-images\") pod \"machine-config-operator-74547568cd-mrmw6\" (UID: \"f34584c2-05c1-4311-9168-fb7fb86976d9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mrmw6" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.491160 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xm8sg\" (UniqueName: \"kubernetes.io/projected/bb9eeabb-23bb-45db-bcbb-aae7c165f260-kube-api-access-xm8sg\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.491334 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjhbq\" (UniqueName: \"kubernetes.io/projected/88908809-4dd9-4e62-9c5b-1bf8b3cfdaed-kube-api-access-qjhbq\") pod \"router-default-5444994796-v2m5p\" (UID: \"88908809-4dd9-4e62-9c5b-1bf8b3cfdaed\") " pod="openshift-ingress/router-default-5444994796-v2m5p" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.491372 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dk8p7\" (UniqueName: \"kubernetes.io/projected/7c9cf2f9-5ba9-4670-9240-25b3d24104c4-kube-api-access-dk8p7\") pod \"service-ca-9c57cc56f-7bjqm\" (UID: \"7c9cf2f9-5ba9-4670-9240-25b3d24104c4\") " pod="openshift-service-ca/service-ca-9c57cc56f-7bjqm" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.491412 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: 
\"kubernetes.io/secret/7c9cf2f9-5ba9-4670-9240-25b3d24104c4-signing-key\") pod \"service-ca-9c57cc56f-7bjqm\" (UID: \"7c9cf2f9-5ba9-4670-9240-25b3d24104c4\") " pod="openshift-service-ca/service-ca-9c57cc56f-7bjqm" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.508085 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.509111 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9wx2k" Jan 23 08:22:48 crc kubenswrapper[4711]: E0123 08:22:48.518739 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:49.018715717 +0000 UTC m=+154.591672075 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.520447 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cssg6" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.526002 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-sk8zj"] Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.526184 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.549385 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw7wx" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.596530 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:48 crc kubenswrapper[4711]: E0123 08:22:48.596625 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:49.09660295 +0000 UTC m=+154.669559308 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.597810 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f34584c2-05c1-4311-9168-fb7fb86976d9-auth-proxy-config\") pod \"machine-config-operator-74547568cd-mrmw6\" (UID: \"f34584c2-05c1-4311-9168-fb7fb86976d9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mrmw6" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.597861 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/343eb722-17b5-4a43-874c-2dc792355793-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-p9lxv\" (UID: \"343eb722-17b5-4a43-874c-2dc792355793\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p9lxv" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.597924 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/88908809-4dd9-4e62-9c5b-1bf8b3cfdaed-metrics-certs\") pod \"router-default-5444994796-v2m5p\" (UID: \"88908809-4dd9-4e62-9c5b-1bf8b3cfdaed\") " pod="openshift-ingress/router-default-5444994796-v2m5p" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.597951 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/96dd98a3-fa00-4d1f-ba55-ab03180cff4c-cert\") pod \"ingress-canary-wl6fc\" (UID: \"96dd98a3-fa00-4d1f-ba55-ab03180cff4c\") " pod="openshift-ingress-canary/ingress-canary-wl6fc" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.598007 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svtdx\" (UniqueName: \"kubernetes.io/projected/0bc67a26-61e7-42bd-b765-2c992886ac63-kube-api-access-svtdx\") pod \"dns-default-nd7dc\" (UID: \"0bc67a26-61e7-42bd-b765-2c992886ac63\") " pod="openshift-dns/dns-default-nd7dc" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.598033 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wqht\" (UniqueName: \"kubernetes.io/projected/ed15b7dc-3a50-412e-a239-ed37611684a8-kube-api-access-6wqht\") pod \"catalog-operator-68c6474976-z87tl\" (UID: \"ed15b7dc-3a50-412e-a239-ed37611684a8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-z87tl" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.598057 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/ce996e23-feba-490d-9e1b-1ae772ed7886-mountpoint-dir\") pod \"csi-hostpathplugin-r9c8g\" (UID: \"ce996e23-feba-490d-9e1b-1ae772ed7886\") " pod="hostpath-provisioner/csi-hostpathplugin-r9c8g" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.598076 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/ce996e23-feba-490d-9e1b-1ae772ed7886-plugins-dir\") pod \"csi-hostpathplugin-r9c8g\" (UID: \"ce996e23-feba-490d-9e1b-1ae772ed7886\") " pod="hostpath-provisioner/csi-hostpathplugin-r9c8g" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.598099 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bb9eeabb-23bb-45db-bcbb-aae7c165f260-registry-certificates\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.598122 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fklbf\" (UniqueName: \"kubernetes.io/projected/fd4bd624-70bb-4602-a4f3-6824c59f90a4-kube-api-access-fklbf\") pod \"control-plane-machine-set-operator-78cbb6b69f-5mwhk\" (UID: \"fd4bd624-70bb-4602-a4f3-6824c59f90a4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5mwhk" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.598157 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/fd4bd624-70bb-4602-a4f3-6824c59f90a4-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5mwhk\" (UID: \"fd4bd624-70bb-4602-a4f3-6824c59f90a4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5mwhk" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.598182 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c17e743d-d33d-45c8-a15b-a6759f00838d-config\") pod \"kube-controller-manager-operator-78b949d7b-xcw55\" (UID: \"c17e743d-d33d-45c8-a15b-a6759f00838d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xcw55" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.598224 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ed15b7dc-3a50-412e-a239-ed37611684a8-srv-cert\") pod \"catalog-operator-68c6474976-z87tl\" (UID: \"ed15b7dc-3a50-412e-a239-ed37611684a8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-z87tl" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.598246 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88908809-4dd9-4e62-9c5b-1bf8b3cfdaed-service-ca-bundle\") pod \"router-default-5444994796-v2m5p\" (UID: \"88908809-4dd9-4e62-9c5b-1bf8b3cfdaed\") " pod="openshift-ingress/router-default-5444994796-v2m5p" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.598285 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.598307 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: 
\"kubernetes.io/host-path/ce996e23-feba-490d-9e1b-1ae772ed7886-registration-dir\") pod \"csi-hostpathplugin-r9c8g\" (UID: \"ce996e23-feba-490d-9e1b-1ae772ed7886\") " pod="hostpath-provisioner/csi-hostpathplugin-r9c8g" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.598333 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0915c58a-b29f-4de3-b5a0-e9985a2fd699-serving-cert\") pod \"service-ca-operator-777779d784-gtl2r\" (UID: \"0915c58a-b29f-4de3-b5a0-e9985a2fd699\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gtl2r" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.598359 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9v6h\" (UniqueName: \"kubernetes.io/projected/8d219b79-0bde-4bac-ae6e-2e39a766d0e0-kube-api-access-s9v6h\") pod \"dns-operator-744455d44c-hn45s\" (UID: \"8d219b79-0bde-4bac-ae6e-2e39a766d0e0\") " pod="openshift-dns-operator/dns-operator-744455d44c-hn45s" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.598381 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ed15b7dc-3a50-412e-a239-ed37611684a8-profile-collector-cert\") pod \"catalog-operator-68c6474976-z87tl\" (UID: \"ed15b7dc-3a50-412e-a239-ed37611684a8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-z87tl" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.598404 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/88908809-4dd9-4e62-9c5b-1bf8b3cfdaed-default-certificate\") pod \"router-default-5444994796-v2m5p\" (UID: \"88908809-4dd9-4e62-9c5b-1bf8b3cfdaed\") " pod="openshift-ingress/router-default-5444994796-v2m5p" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.598425 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8d219b79-0bde-4bac-ae6e-2e39a766d0e0-metrics-tls\") pod \"dns-operator-744455d44c-hn45s\" (UID: \"8d219b79-0bde-4bac-ae6e-2e39a766d0e0\") " pod="openshift-dns-operator/dns-operator-744455d44c-hn45s" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.598462 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/98a4475c-0ffd-49e8-8cd1-e5636ccb174f-node-bootstrap-token\") pod \"machine-config-server-vrl4r\" (UID: \"98a4475c-0ffd-49e8-8cd1-e5636ccb174f\") " pod="openshift-machine-config-operator/machine-config-server-vrl4r" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.598492 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0bc67a26-61e7-42bd-b765-2c992886ac63-metrics-tls\") pod \"dns-default-nd7dc\" (UID: \"0bc67a26-61e7-42bd-b765-2c992886ac63\") " pod="openshift-dns/dns-default-nd7dc" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.601394 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bb9eeabb-23bb-45db-bcbb-aae7c165f260-registry-certificates\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:48 crc 
kubenswrapper[4711]: E0123 08:22:48.603020 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:49.102996011 +0000 UTC m=+154.675952369 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.604175 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f34584c2-05c1-4311-9168-fb7fb86976d9-auth-proxy-config\") pod \"machine-config-operator-74547568cd-mrmw6\" (UID: \"f34584c2-05c1-4311-9168-fb7fb86976d9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mrmw6" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.605208 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88908809-4dd9-4e62-9c5b-1bf8b3cfdaed-service-ca-bundle\") pod \"router-default-5444994796-v2m5p\" (UID: \"88908809-4dd9-4e62-9c5b-1bf8b3cfdaed\") " pod="openshift-ingress/router-default-5444994796-v2m5p" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.605959 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c17e743d-d33d-45c8-a15b-a6759f00838d-config\") pod \"kube-controller-manager-operator-78b949d7b-xcw55\" (UID: \"c17e743d-d33d-45c8-a15b-a6759f00838d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xcw55" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.609886 4711 util.go:30] "No sandbox for pod can be found. 
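The two failure variants above (Unmounter.TearDownAt for the old pod 8f668bae-612b-4b75-9490-919e737c6a3b and attacher.MountDevice for image-registry-697d97f7c8-58wxx) share one root cause: the kubelet resolves the driver name kubevirt.io.hostpath-provisioner against an in-memory registry that is only populated once the driver's node plugin registers over the plugin-registration socket, and the csi-hostpathplugin-r9c8g pod that provides this driver is itself still being set up in this same window. A minimal Go sketch of that lookup pattern (illustrative types, not the actual kubelet source):

    // Sketch of the registry lookup behind "driver name ... not found in the
    // list of registered CSI drivers". The map is filled only when a node
    // plugin registers via the registration-dir socket mounted above.
    package main

    import (
        "fmt"
        "sync"
    )

    type csiDriversStore struct {
        mu      sync.RWMutex
        drivers map[string]string // driver name -> plugin endpoint
    }

    func (s *csiDriversStore) newDriverClient(name string) (string, error) {
        s.mu.RLock()
        defer s.mu.RUnlock()
        if endpoint, ok := s.drivers[name]; ok {
            return endpoint, nil
        }
        return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
    }

    func main() {
        store := &csiDriversStore{drivers: map[string]string{}}
        _, err := store.newDriverClient("kubevirt.io.hostpath-provisioner")
        fmt.Println(err) // fails until the hostpath plugin registers
    }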
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.610099 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/88908809-4dd9-4e62-9c5b-1bf8b3cfdaed-stats-auth\") pod \"router-default-5444994796-v2m5p\" (UID: \"88908809-4dd9-4e62-9c5b-1bf8b3cfdaed\") " pod="openshift-ingress/router-default-5444994796-v2m5p"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.610543 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/88908809-4dd9-4e62-9c5b-1bf8b3cfdaed-metrics-certs\") pod \"router-default-5444994796-v2m5p\" (UID: \"88908809-4dd9-4e62-9c5b-1bf8b3cfdaed\") " pod="openshift-ingress/router-default-5444994796-v2m5p"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.611219 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/0c1a3ec4-2523-4339-a252-99b7d88c8c93-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-nkrcc\" (UID: \"0c1a3ec4-2523-4339-a252-99b7d88c8c93\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkrcc"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.611414 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtndh\" (UniqueName: \"kubernetes.io/projected/0c1a3ec4-2523-4339-a252-99b7d88c8c93-kube-api-access-dtndh\") pod \"package-server-manager-789f6589d5-nkrcc\" (UID: \"0c1a3ec4-2523-4339-a252-99b7d88c8c93\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkrcc"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.611559 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/343eb722-17b5-4a43-874c-2dc792355793-proxy-tls\") pod \"machine-config-controller-84d6567774-p9lxv\" (UID: \"343eb722-17b5-4a43-874c-2dc792355793\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p9lxv"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.611669 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0bc67a26-61e7-42bd-b765-2c992886ac63-config-volume\") pod \"dns-default-nd7dc\" (UID: \"0bc67a26-61e7-42bd-b765-2c992886ac63\") " pod="openshift-dns/dns-default-nd7dc"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.611722 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sff7r\" (UniqueName: \"kubernetes.io/projected/98a4475c-0ffd-49e8-8cd1-e5636ccb174f-kube-api-access-sff7r\") pod \"machine-config-server-vrl4r\" (UID: \"98a4475c-0ffd-49e8-8cd1-e5636ccb174f\") " pod="openshift-machine-config-operator/machine-config-server-vrl4r"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.611752 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tnld\" (UniqueName: \"kubernetes.io/projected/0915c58a-b29f-4de3-b5a0-e9985a2fd699-kube-api-access-7tnld\") pod \"service-ca-operator-777779d784-gtl2r\" (UID: \"0915c58a-b29f-4de3-b5a0-e9985a2fd699\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gtl2r"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.611788 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f34584c2-05c1-4311-9168-fb7fb86976d9-proxy-tls\") pod \"machine-config-operator-74547568cd-mrmw6\" (UID: \"f34584c2-05c1-4311-9168-fb7fb86976d9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mrmw6"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.611836 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/98a4475c-0ffd-49e8-8cd1-e5636ccb174f-certs\") pod \"machine-config-server-vrl4r\" (UID: \"98a4475c-0ffd-49e8-8cd1-e5636ccb174f\") " pod="openshift-machine-config-operator/machine-config-server-vrl4r"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.611864 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/7c9cf2f9-5ba9-4670-9240-25b3d24104c4-signing-cabundle\") pod \"service-ca-9c57cc56f-7bjqm\" (UID: \"7c9cf2f9-5ba9-4670-9240-25b3d24104c4\") " pod="openshift-service-ca/service-ca-9c57cc56f-7bjqm"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.611885 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bb9eeabb-23bb-45db-bcbb-aae7c165f260-installation-pull-secrets\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.611940 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdmq6\" (UniqueName: \"kubernetes.io/projected/3537ecbf-d37c-458e-b460-8f72882bfd08-kube-api-access-xdmq6\") pod \"migrator-59844c95c7-nb2x5\" (UID: \"3537ecbf-d37c-458e-b460-8f72882bfd08\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-nb2x5"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.611960 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c17e743d-d33d-45c8-a15b-a6759f00838d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-xcw55\" (UID: \"c17e743d-d33d-45c8-a15b-a6759f00838d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xcw55"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.612028 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bb9eeabb-23bb-45db-bcbb-aae7c165f260-registry-tls\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.612143 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e1ab258d-ba2f-434d-8a15-2a08f24d03cb-secret-volume\") pod \"collect-profiles-29485935-856kh\" (UID: \"e1ab258d-ba2f-434d-8a15-2a08f24d03cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.612162 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cv5gl\" (UniqueName: \"kubernetes.io/projected/f34584c2-05c1-4311-9168-fb7fb86976d9-kube-api-access-cv5gl\") pod \"machine-config-operator-74547568cd-mrmw6\" (UID: \"f34584c2-05c1-4311-9168-fb7fb86976d9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mrmw6"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.612209 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwk4g\" (UniqueName: \"kubernetes.io/projected/e1ab258d-ba2f-434d-8a15-2a08f24d03cb-kube-api-access-bwk4g\") pod \"collect-profiles-29485935-856kh\" (UID: \"e1ab258d-ba2f-434d-8a15-2a08f24d03cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.612238 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c17e743d-d33d-45c8-a15b-a6759f00838d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-xcw55\" (UID: \"c17e743d-d33d-45c8-a15b-a6759f00838d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xcw55"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.612260 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/ce996e23-feba-490d-9e1b-1ae772ed7886-csi-data-dir\") pod \"csi-hostpathplugin-r9c8g\" (UID: \"ce996e23-feba-490d-9e1b-1ae772ed7886\") " pod="hostpath-provisioner/csi-hostpathplugin-r9c8g"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.612283 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bb9eeabb-23bb-45db-bcbb-aae7c165f260-bound-sa-token\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.612308 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bb9eeabb-23bb-45db-bcbb-aae7c165f260-ca-trust-extracted\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.612335 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwbjg\" (UniqueName: \"kubernetes.io/projected/96dd98a3-fa00-4d1f-ba55-ab03180cff4c-kube-api-access-bwbjg\") pod \"ingress-canary-wl6fc\" (UID: \"96dd98a3-fa00-4d1f-ba55-ab03180cff4c\") " pod="openshift-ingress-canary/ingress-canary-wl6fc"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.612376 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f34584c2-05c1-4311-9168-fb7fb86976d9-images\") pod \"machine-config-operator-74547568cd-mrmw6\" (UID: \"f34584c2-05c1-4311-9168-fb7fb86976d9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mrmw6"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.612403 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xm8sg\" (UniqueName: \"kubernetes.io/projected/bb9eeabb-23bb-45db-bcbb-aae7c165f260-kube-api-access-xm8sg\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.612445 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjhbq\" (UniqueName: \"kubernetes.io/projected/88908809-4dd9-4e62-9c5b-1bf8b3cfdaed-kube-api-access-qjhbq\") pod \"router-default-5444994796-v2m5p\" (UID: \"88908809-4dd9-4e62-9c5b-1bf8b3cfdaed\") " pod="openshift-ingress/router-default-5444994796-v2m5p"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.612469 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dk8p7\" (UniqueName: \"kubernetes.io/projected/7c9cf2f9-5ba9-4670-9240-25b3d24104c4-kube-api-access-dk8p7\") pod \"service-ca-9c57cc56f-7bjqm\" (UID: \"7c9cf2f9-5ba9-4670-9240-25b3d24104c4\") " pod="openshift-service-ca/service-ca-9c57cc56f-7bjqm"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.612556 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/7c9cf2f9-5ba9-4670-9240-25b3d24104c4-signing-key\") pod \"service-ca-9c57cc56f-7bjqm\" (UID: \"7c9cf2f9-5ba9-4670-9240-25b3d24104c4\") " pod="openshift-service-ca/service-ca-9c57cc56f-7bjqm"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.612620 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zghxl\" (UniqueName: \"kubernetes.io/projected/343eb722-17b5-4a43-874c-2dc792355793-kube-api-access-zghxl\") pod \"machine-config-controller-84d6567774-p9lxv\" (UID: \"343eb722-17b5-4a43-874c-2dc792355793\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p9lxv"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.612667 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e1ab258d-ba2f-434d-8a15-2a08f24d03cb-config-volume\") pod \"collect-profiles-29485935-856kh\" (UID: \"e1ab258d-ba2f-434d-8a15-2a08f24d03cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.612689 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0915c58a-b29f-4de3-b5a0-e9985a2fd699-config\") pod \"service-ca-operator-777779d784-gtl2r\" (UID: \"0915c58a-b29f-4de3-b5a0-e9985a2fd699\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gtl2r"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.612711 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/ce996e23-feba-490d-9e1b-1ae772ed7886-socket-dir\") pod \"csi-hostpathplugin-r9c8g\" (UID: \"ce996e23-feba-490d-9e1b-1ae772ed7886\") " pod="hostpath-provisioner/csi-hostpathplugin-r9c8g"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.612747 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bb9eeabb-23bb-45db-bcbb-aae7c165f260-trusted-ca\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.612772 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrndz\" (UniqueName: \"kubernetes.io/projected/ce996e23-feba-490d-9e1b-1ae772ed7886-kube-api-access-qrndz\") pod \"csi-hostpathplugin-r9c8g\" (UID: \"ce996e23-feba-490d-9e1b-1ae772ed7886\") " pod="hostpath-provisioner/csi-hostpathplugin-r9c8g"
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrndz\" (UniqueName: \"kubernetes.io/projected/ce996e23-feba-490d-9e1b-1ae772ed7886-kube-api-access-qrndz\") pod \"csi-hostpathplugin-r9c8g\" (UID: \"ce996e23-feba-490d-9e1b-1ae772ed7886\") " pod="hostpath-provisioner/csi-hostpathplugin-r9c8g" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.614761 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/7c9cf2f9-5ba9-4670-9240-25b3d24104c4-signing-cabundle\") pod \"service-ca-9c57cc56f-7bjqm\" (UID: \"7c9cf2f9-5ba9-4670-9240-25b3d24104c4\") " pod="openshift-service-ca/service-ca-9c57cc56f-7bjqm" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.615612 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ed15b7dc-3a50-412e-a239-ed37611684a8-srv-cert\") pod \"catalog-operator-68c6474976-z87tl\" (UID: \"ed15b7dc-3a50-412e-a239-ed37611684a8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-z87tl" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.616343 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bb9eeabb-23bb-45db-bcbb-aae7c165f260-ca-trust-extracted\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.617090 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/88908809-4dd9-4e62-9c5b-1bf8b3cfdaed-default-certificate\") pod \"router-default-5444994796-v2m5p\" (UID: \"88908809-4dd9-4e62-9c5b-1bf8b3cfdaed\") " pod="openshift-ingress/router-default-5444994796-v2m5p" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.617215 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f34584c2-05c1-4311-9168-fb7fb86976d9-images\") pod \"machine-config-operator-74547568cd-mrmw6\" (UID: \"f34584c2-05c1-4311-9168-fb7fb86976d9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mrmw6" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.625527 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0915c58a-b29f-4de3-b5a0-e9985a2fd699-serving-cert\") pod \"service-ca-operator-777779d784-gtl2r\" (UID: \"0915c58a-b29f-4de3-b5a0-e9985a2fd699\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gtl2r" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.626022 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/88908809-4dd9-4e62-9c5b-1bf8b3cfdaed-stats-auth\") pod \"router-default-5444994796-v2m5p\" (UID: \"88908809-4dd9-4e62-9c5b-1bf8b3cfdaed\") " pod="openshift-ingress/router-default-5444994796-v2m5p" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.628170 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e1ab258d-ba2f-434d-8a15-2a08f24d03cb-config-volume\") pod \"collect-profiles-29485935-856kh\" (UID: \"e1ab258d-ba2f-434d-8a15-2a08f24d03cb\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.628298 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0915c58a-b29f-4de3-b5a0-e9985a2fd699-config\") pod \"service-ca-operator-777779d784-gtl2r\" (UID: \"0915c58a-b29f-4de3-b5a0-e9985a2fd699\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gtl2r" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.629027 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f34584c2-05c1-4311-9168-fb7fb86976d9-proxy-tls\") pod \"machine-config-operator-74547568cd-mrmw6\" (UID: \"f34584c2-05c1-4311-9168-fb7fb86976d9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mrmw6" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.629950 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bb9eeabb-23bb-45db-bcbb-aae7c165f260-trusted-ca\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.631171 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/0c1a3ec4-2523-4339-a252-99b7d88c8c93-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-nkrcc\" (UID: \"0c1a3ec4-2523-4339-a252-99b7d88c8c93\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkrcc" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.631397 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e1ab258d-ba2f-434d-8a15-2a08f24d03cb-secret-volume\") pod \"collect-profiles-29485935-856kh\" (UID: \"e1ab258d-ba2f-434d-8a15-2a08f24d03cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.632310 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8d219b79-0bde-4bac-ae6e-2e39a766d0e0-metrics-tls\") pod \"dns-operator-744455d44c-hn45s\" (UID: \"8d219b79-0bde-4bac-ae6e-2e39a766d0e0\") " pod="openshift-dns-operator/dns-operator-744455d44c-hn45s" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.633882 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bb9eeabb-23bb-45db-bcbb-aae7c165f260-registry-tls\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.634317 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/7c9cf2f9-5ba9-4670-9240-25b3d24104c4-signing-key\") pod \"service-ca-9c57cc56f-7bjqm\" (UID: \"7c9cf2f9-5ba9-4670-9240-25b3d24104c4\") " pod="openshift-service-ca/service-ca-9c57cc56f-7bjqm" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.635582 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: 
\"kubernetes.io/secret/bb9eeabb-23bb-45db-bcbb-aae7c165f260-installation-pull-secrets\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.635905 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ed15b7dc-3a50-412e-a239-ed37611684a8-profile-collector-cert\") pod \"catalog-operator-68c6474976-z87tl\" (UID: \"ed15b7dc-3a50-412e-a239-ed37611684a8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-z87tl" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.636454 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/fd4bd624-70bb-4602-a4f3-6824c59f90a4-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5mwhk\" (UID: \"fd4bd624-70bb-4602-a4f3-6824c59f90a4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5mwhk" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.637465 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/96dd98a3-fa00-4d1f-ba55-ab03180cff4c-cert\") pod \"ingress-canary-wl6fc\" (UID: \"96dd98a3-fa00-4d1f-ba55-ab03180cff4c\") " pod="openshift-ingress-canary/ingress-canary-wl6fc" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.642139 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.645657 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c17e743d-d33d-45c8-a15b-a6759f00838d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-xcw55\" (UID: \"c17e743d-d33d-45c8-a15b-a6759f00838d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xcw55" Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.646290 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-z4ppj"] Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.653757 4711 util.go:30] "No sandbox for pod can be found. 
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.658255 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fklbf\" (UniqueName: \"kubernetes.io/projected/fd4bd624-70bb-4602-a4f3-6824c59f90a4-kube-api-access-fklbf\") pod \"control-plane-machine-set-operator-78cbb6b69f-5mwhk\" (UID: \"fd4bd624-70bb-4602-a4f3-6824c59f90a4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5mwhk"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.659464 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9v6h\" (UniqueName: \"kubernetes.io/projected/8d219b79-0bde-4bac-ae6e-2e39a766d0e0-kube-api-access-s9v6h\") pod \"dns-operator-744455d44c-hn45s\" (UID: \"8d219b79-0bde-4bac-ae6e-2e39a766d0e0\") " pod="openshift-dns-operator/dns-operator-744455d44c-hn45s"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.703005 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.714230 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.714655 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/ce996e23-feba-490d-9e1b-1ae772ed7886-csi-data-dir\") pod \"csi-hostpathplugin-r9c8g\" (UID: \"ce996e23-feba-490d-9e1b-1ae772ed7886\") " pod="hostpath-provisioner/csi-hostpathplugin-r9c8g"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.714752 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zghxl\" (UniqueName: \"kubernetes.io/projected/343eb722-17b5-4a43-874c-2dc792355793-kube-api-access-zghxl\") pod \"machine-config-controller-84d6567774-p9lxv\" (UID: \"343eb722-17b5-4a43-874c-2dc792355793\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p9lxv"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.714785 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/ce996e23-feba-490d-9e1b-1ae772ed7886-socket-dir\") pod \"csi-hostpathplugin-r9c8g\" (UID: \"ce996e23-feba-490d-9e1b-1ae772ed7886\") " pod="hostpath-provisioner/csi-hostpathplugin-r9c8g"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.714810 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrndz\" (UniqueName: \"kubernetes.io/projected/ce996e23-feba-490d-9e1b-1ae772ed7886-kube-api-access-qrndz\") pod \"csi-hostpathplugin-r9c8g\" (UID: \"ce996e23-feba-490d-9e1b-1ae772ed7886\") " pod="hostpath-provisioner/csi-hostpathplugin-r9c8g"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.714840 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/343eb722-17b5-4a43-874c-2dc792355793-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-p9lxv\" (UID: \"343eb722-17b5-4a43-874c-2dc792355793\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p9lxv"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.714870 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svtdx\" (UniqueName: \"kubernetes.io/projected/0bc67a26-61e7-42bd-b765-2c992886ac63-kube-api-access-svtdx\") pod \"dns-default-nd7dc\" (UID: \"0bc67a26-61e7-42bd-b765-2c992886ac63\") " pod="openshift-dns/dns-default-nd7dc"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.714907 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/ce996e23-feba-490d-9e1b-1ae772ed7886-mountpoint-dir\") pod \"csi-hostpathplugin-r9c8g\" (UID: \"ce996e23-feba-490d-9e1b-1ae772ed7886\") " pod="hostpath-provisioner/csi-hostpathplugin-r9c8g"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.714928 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/ce996e23-feba-490d-9e1b-1ae772ed7886-plugins-dir\") pod \"csi-hostpathplugin-r9c8g\" (UID: \"ce996e23-feba-490d-9e1b-1ae772ed7886\") " pod="hostpath-provisioner/csi-hostpathplugin-r9c8g"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.714972 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/ce996e23-feba-490d-9e1b-1ae772ed7886-registration-dir\") pod \"csi-hostpathplugin-r9c8g\" (UID: \"ce996e23-feba-490d-9e1b-1ae772ed7886\") " pod="hostpath-provisioner/csi-hostpathplugin-r9c8g"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.715003 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/98a4475c-0ffd-49e8-8cd1-e5636ccb174f-node-bootstrap-token\") pod \"machine-config-server-vrl4r\" (UID: \"98a4475c-0ffd-49e8-8cd1-e5636ccb174f\") " pod="openshift-machine-config-operator/machine-config-server-vrl4r"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.715028 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0bc67a26-61e7-42bd-b765-2c992886ac63-metrics-tls\") pod \"dns-default-nd7dc\" (UID: \"0bc67a26-61e7-42bd-b765-2c992886ac63\") " pod="openshift-dns/dns-default-nd7dc"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.715073 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/343eb722-17b5-4a43-874c-2dc792355793-proxy-tls\") pod \"machine-config-controller-84d6567774-p9lxv\" (UID: \"343eb722-17b5-4a43-874c-2dc792355793\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p9lxv"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.715104 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0bc67a26-61e7-42bd-b765-2c992886ac63-config-volume\") pod \"dns-default-nd7dc\" (UID: \"0bc67a26-61e7-42bd-b765-2c992886ac63\") " pod="openshift-dns/dns-default-nd7dc"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.715129 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sff7r\" (UniqueName: \"kubernetes.io/projected/98a4475c-0ffd-49e8-8cd1-e5636ccb174f-kube-api-access-sff7r\") pod \"machine-config-server-vrl4r\" (UID: \"98a4475c-0ffd-49e8-8cd1-e5636ccb174f\") " pod="openshift-machine-config-operator/machine-config-server-vrl4r"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.715167 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/98a4475c-0ffd-49e8-8cd1-e5636ccb174f-certs\") pod \"machine-config-server-vrl4r\" (UID: \"98a4475c-0ffd-49e8-8cd1-e5636ccb174f\") " pod="openshift-machine-config-operator/machine-config-server-vrl4r"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.715597 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/ce996e23-feba-490d-9e1b-1ae772ed7886-mountpoint-dir\") pod \"csi-hostpathplugin-r9c8g\" (UID: \"ce996e23-feba-490d-9e1b-1ae772ed7886\") " pod="hostpath-provisioner/csi-hostpathplugin-r9c8g"
Jan 23 08:22:48 crc kubenswrapper[4711]: E0123 08:22:48.715731 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:49.215699727 +0000 UTC m=+154.788656165 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.715807 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/ce996e23-feba-490d-9e1b-1ae772ed7886-csi-data-dir\") pod \"csi-hostpathplugin-r9c8g\" (UID: \"ce996e23-feba-490d-9e1b-1ae772ed7886\") " pod="hostpath-provisioner/csi-hostpathplugin-r9c8g"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.716104 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/ce996e23-feba-490d-9e1b-1ae772ed7886-socket-dir\") pod \"csi-hostpathplugin-r9c8g\" (UID: \"ce996e23-feba-490d-9e1b-1ae772ed7886\") " pod="hostpath-provisioner/csi-hostpathplugin-r9c8g"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.717034 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/343eb722-17b5-4a43-874c-2dc792355793-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-p9lxv\" (UID: \"343eb722-17b5-4a43-874c-2dc792355793\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p9lxv"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.717966 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/ce996e23-feba-490d-9e1b-1ae772ed7886-plugins-dir\") pod \"csi-hostpathplugin-r9c8g\" (UID: \"ce996e23-feba-490d-9e1b-1ae772ed7886\") " pod="hostpath-provisioner/csi-hostpathplugin-r9c8g"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.718035 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/ce996e23-feba-490d-9e1b-1ae772ed7886-registration-dir\") pod \"csi-hostpathplugin-r9c8g\" (UID: \"ce996e23-feba-490d-9e1b-1ae772ed7886\") " pod="hostpath-provisioner/csi-hostpathplugin-r9c8g"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.719164 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/98a4475c-0ffd-49e8-8cd1-e5636ccb174f-certs\") pod \"machine-config-server-vrl4r\" (UID: \"98a4475c-0ffd-49e8-8cd1-e5636ccb174f\") " pod="openshift-machine-config-operator/machine-config-server-vrl4r"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.719366 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-s6xwh"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.720352 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0bc67a26-61e7-42bd-b765-2c992886ac63-config-volume\") pod \"dns-default-nd7dc\" (UID: \"0bc67a26-61e7-42bd-b765-2c992886ac63\") " pod="openshift-dns/dns-default-nd7dc"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.721236 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wqht\" (UniqueName: \"kubernetes.io/projected/ed15b7dc-3a50-412e-a239-ed37611684a8-kube-api-access-6wqht\") pod \"catalog-operator-68c6474976-z87tl\" (UID: \"ed15b7dc-3a50-412e-a239-ed37611684a8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-z87tl"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.721327 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0bc67a26-61e7-42bd-b765-2c992886ac63-metrics-tls\") pod \"dns-default-nd7dc\" (UID: \"0bc67a26-61e7-42bd-b765-2c992886ac63\") " pod="openshift-dns/dns-default-nd7dc"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.722603 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/343eb722-17b5-4a43-874c-2dc792355793-proxy-tls\") pod \"machine-config-controller-84d6567774-p9lxv\" (UID: \"343eb722-17b5-4a43-874c-2dc792355793\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p9lxv"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.726466 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/98a4475c-0ffd-49e8-8cd1-e5636ccb174f-node-bootstrap-token\") pod \"machine-config-server-vrl4r\" (UID: \"98a4475c-0ffd-49e8-8cd1-e5636ccb174f\") " pod="openshift-machine-config-operator/machine-config-server-vrl4r"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.763551 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtndh\" (UniqueName: \"kubernetes.io/projected/0c1a3ec4-2523-4339-a252-99b7d88c8c93-kube-api-access-dtndh\") pod \"package-server-manager-789f6589d5-nkrcc\" (UID: \"0c1a3ec4-2523-4339-a252-99b7d88c8c93\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkrcc"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.763801 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-z87tl"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.765546 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-hn45s"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.765959 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tnld\" (UniqueName: \"kubernetes.io/projected/0915c58a-b29f-4de3-b5a0-e9985a2fd699-kube-api-access-7tnld\") pod \"service-ca-operator-777779d784-gtl2r\" (UID: \"0915c58a-b29f-4de3-b5a0-e9985a2fd699\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gtl2r"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.778281 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5mwhk"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.786962 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwbjg\" (UniqueName: \"kubernetes.io/projected/96dd98a3-fa00-4d1f-ba55-ab03180cff4c-kube-api-access-bwbjg\") pod \"ingress-canary-wl6fc\" (UID: \"96dd98a3-fa00-4d1f-ba55-ab03180cff4c\") " pod="openshift-ingress-canary/ingress-canary-wl6fc"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.787553 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-wl6fc"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.801848 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bb9eeabb-23bb-45db-bcbb-aae7c165f260-bound-sa-token\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.816618 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:48 crc kubenswrapper[4711]: E0123 08:22:48.817167 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:49.317145423 +0000 UTC m=+154.890101791 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.821460 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjhbq\" (UniqueName: \"kubernetes.io/projected/88908809-4dd9-4e62-9c5b-1bf8b3cfdaed-kube-api-access-qjhbq\") pod \"router-default-5444994796-v2m5p\" (UID: \"88908809-4dd9-4e62-9c5b-1bf8b3cfdaed\") " pod="openshift-ingress/router-default-5444994796-v2m5p"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.826156 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xm8sg\" (UniqueName: \"kubernetes.io/projected/bb9eeabb-23bb-45db-bcbb-aae7c165f260-kube-api-access-xm8sg\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.858813 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-v2m5p"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.863156 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dk8p7\" (UniqueName: \"kubernetes.io/projected/7c9cf2f9-5ba9-4670-9240-25b3d24104c4-kube-api-access-dk8p7\") pod \"service-ca-9c57cc56f-7bjqm\" (UID: \"7c9cf2f9-5ba9-4670-9240-25b3d24104c4\") " pod="openshift-service-ca/service-ca-9c57cc56f-7bjqm"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.876880 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdmq6\" (UniqueName: \"kubernetes.io/projected/3537ecbf-d37c-458e-b460-8f72882bfd08-kube-api-access-xdmq6\") pod \"migrator-59844c95c7-nb2x5\" (UID: \"3537ecbf-d37c-458e-b460-8f72882bfd08\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-nb2x5"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.879284 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwk4g\" (UniqueName: \"kubernetes.io/projected/e1ab258d-ba2f-434d-8a15-2a08f24d03cb-kube-api-access-bwk4g\") pod \"collect-profiles-29485935-856kh\" (UID: \"e1ab258d-ba2f-434d-8a15-2a08f24d03cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.911150 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-nb2x5"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.916798 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cv5gl\" (UniqueName: \"kubernetes.io/projected/f34584c2-05c1-4311-9168-fb7fb86976d9-kube-api-access-cv5gl\") pod \"machine-config-operator-74547568cd-mrmw6\" (UID: \"f34584c2-05c1-4311-9168-fb7fb86976d9\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mrmw6"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.917560 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:48 crc kubenswrapper[4711]: E0123 08:22:48.917997 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:49.417974602 +0000 UTC m=+154.990930970 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.925316 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-gtl2r"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.932372 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c17e743d-d33d-45c8-a15b-a6759f00838d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-xcw55\" (UID: \"c17e743d-d33d-45c8-a15b-a6759f00838d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xcw55"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.932719 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mrmw6"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.938302 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nrm2j"]
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.942926 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-p5sq6"]
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.964702 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zghxl\" (UniqueName: \"kubernetes.io/projected/343eb722-17b5-4a43-874c-2dc792355793-kube-api-access-zghxl\") pod \"machine-config-controller-84d6567774-p9lxv\" (UID: \"343eb722-17b5-4a43-874c-2dc792355793\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p9lxv"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.965065 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh"
Jan 23 08:22:48 crc kubenswrapper[4711]: I0123 08:22:48.983323 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrndz\" (UniqueName: \"kubernetes.io/projected/ce996e23-feba-490d-9e1b-1ae772ed7886-kube-api-access-qrndz\") pod \"csi-hostpathplugin-r9c8g\" (UID: \"ce996e23-feba-490d-9e1b-1ae772ed7886\") " pod="hostpath-provisioner/csi-hostpathplugin-r9c8g"
Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.021228 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:49 crc kubenswrapper[4711]: E0123 08:22:49.021642 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:49.521625982 +0000 UTC m=+155.094582340 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
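These retries resolve on their own once the csi-hostpathplugin-r9c8g pod (whose volumes are being mounted throughout this window) starts and its node plugin registers, at which point kubevirt.io.hostpath-provisioner appears among the node's registered drivers. One illustrative way to confirm registration from outside the kubelet is to read the CSINode object for the node ("crc" in this log); a hedged client-go sketch, not something this log's kubelet runs:

    // Lists the CSI drivers recorded on the node's CSINode object.
    package main

    import (
        "context"
        "fmt"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        config, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
        if err != nil {
            panic(err)
        }
        clientset, err := kubernetes.NewForConfig(config)
        if err != nil {
            panic(err)
        }
        // "crc" is the node name seen in this log.
        csiNode, err := clientset.StorageV1().CSINodes().Get(context.TODO(), "crc", metav1.GetOptions{})
        if err != nil {
            panic(err)
        }
        for _, d := range csiNode.Spec.Drivers {
            fmt.Println("registered driver:", d.Name)
        }
    }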
Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.022438 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svtdx\" (UniqueName: \"kubernetes.io/projected/0bc67a26-61e7-42bd-b765-2c992886ac63-kube-api-access-svtdx\") pod \"dns-default-nd7dc\" (UID: \"0bc67a26-61e7-42bd-b765-2c992886ac63\") " pod="openshift-dns/dns-default-nd7dc"
Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.039115 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sff7r\" (UniqueName: \"kubernetes.io/projected/98a4475c-0ffd-49e8-8cd1-e5636ccb174f-kube-api-access-sff7r\") pod \"machine-config-server-vrl4r\" (UID: \"98a4475c-0ffd-49e8-8cd1-e5636ccb174f\") " pod="openshift-machine-config-operator/machine-config-server-vrl4r"
Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.043816 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkrcc"
Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.049699 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-7bjqm"
Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.109690 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p9lxv"
Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.113880 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-nd7dc"
Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.131025 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:49 crc kubenswrapper[4711]: E0123 08:22:49.131625 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:49.63159998 +0000 UTC m=+155.204556348 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.141843 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-r9c8g"
Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.148826 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-vrl4r"
Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.150684 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-gxhxb"]
Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.201777 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xcw55"
Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.234536 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:49 crc kubenswrapper[4711]: E0123 08:22:49.235068 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:49.735053266 +0000 UTC m=+155.308009634 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.342667 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:49 crc kubenswrapper[4711]: E0123 08:22:49.343182 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:49.843157778 +0000 UTC m=+155.416114146 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.417146 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw7wx"]
Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.446725 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:49 crc kubenswrapper[4711]: E0123 08:22:49.447349 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:49.947329901 +0000 UTC m=+155.520286269 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:49 crc kubenswrapper[4711]: W0123 08:22:49.455875 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98a4475c_0ffd_49e8_8cd1_e5636ccb174f.slice/crio-d1dc6b8088a8c59249f18201a028c7b37767c98e639a5051bcbfae80858146f4 WatchSource:0}: Error finding container d1dc6b8088a8c59249f18201a028c7b37767c98e639a5051bcbfae80858146f4: Status 404 returned error can't find the container with id d1dc6b8088a8c59249f18201a028c7b37767c98e639a5051bcbfae80858146f4
Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.546469 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7" event={"ID":"b328000b-5587-4645-a3b6-02397de51cf6","Type":"ContainerStarted","Data":"e09f43dbf78d38d2f610d845ed1964c7ab19280b10430b0f68a2f5925492bfaa"}
Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.547638 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.547658 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7"
Jan 23 08:22:49 crc kubenswrapper[4711]: E0123 08:22:49.548062 4711 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:50.048042897 +0000 UTC m=+155.620999265 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.568922 4711 csr.go:261] certificate signing request csr-2dv7z is approved, waiting to be issued Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.573216 4711 csr.go:257] certificate signing request csr-2dv7z is issued Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.606259 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7" Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.607369 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-vrl4r" event={"ID":"98a4475c-0ffd-49e8-8cd1-e5636ccb174f","Type":"ContainerStarted","Data":"d1dc6b8088a8c59249f18201a028c7b37767c98e639a5051bcbfae80858146f4"} Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.649328 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:49 crc kubenswrapper[4711]: E0123 08:22:49.650458 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:50.150444416 +0000 UTC m=+155.723400774 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:49 crc kubenswrapper[4711]: W0123 08:22:49.739176 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce0cb782_5b0e_4dfd_8c39_8ad6d6787d58.slice/crio-a6553486c7e9671a80e11c354785e7802bf6adbe2d7bf3f1897d9390dc3207c1 WatchSource:0}: Error finding container a6553486c7e9671a80e11c354785e7802bf6adbe2d7bf3f1897d9390dc3207c1: Status 404 returned error can't find the container with id a6553486c7e9671a80e11c354785e7802bf6adbe2d7bf3f1897d9390dc3207c1 Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.747779 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-gxhxb" event={"ID":"9b990ad9-6046-4ef0-bf53-1a5a74c9d0d8","Type":"ContainerStarted","Data":"16310b931fd364a38410862df690e9247e9fe41a4a7d6e9ffc6d2d25e4511179"} Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.751248 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:49 crc kubenswrapper[4711]: E0123 08:22:49.751804 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:50.251622334 +0000 UTC m=+155.824578702 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.764320 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-txv7j" podStartSLOduration=133.764297772 podStartE2EDuration="2m13.764297772s" podCreationTimestamp="2026-01-23 08:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:49.714449622 +0000 UTC m=+155.287405990" watchObservedRunningTime="2026-01-23 08:22:49.764297772 +0000 UTC m=+155.337254140" Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.770885 4711 generic.go:334] "Generic (PLEG): container finished" podID="3578bb73-066e-43b9-85db-a5989823d8d1" containerID="e8e508015407ee22bb585369673f9b525d7287bccff28a61495370de50d998b0" exitCode=0 Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.771696 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" event={"ID":"3578bb73-066e-43b9-85db-a5989823d8d1","Type":"ContainerDied","Data":"e8e508015407ee22bb585369673f9b525d7287bccff28a61495370de50d998b0"} Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.825685 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-6rpqs" event={"ID":"72195d0a-6291-4d31-be15-10b066538f0e","Type":"ContainerStarted","Data":"7ec94a2e643b3bfad4b0f887f0813c230a0b6738e67856cac0743c4909c8f678"} Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.840646 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" event={"ID":"be1b64cc-b8d5-429c-8189-542268f1d7a2","Type":"ContainerStarted","Data":"c19aba178eeb91fd573e323012cf4609cf3fd506f4d40d7a3350fd7661c0845d"} Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.854638 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:49 crc kubenswrapper[4711]: E0123 08:22:49.855624 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:50.355607634 +0000 UTC m=+155.928564012 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.871134 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-z4ppj" event={"ID":"a44c144e-22c9-44be-88bd-408c82ed0e0a","Type":"ContainerStarted","Data":"245e487840414ef2e9271054d6e7060533d42f4d99a6eeef0e374cdf6cc49c6c"} Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.872324 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp" event={"ID":"86145ace-e2d3-4b5b-9475-f52b19faa9df","Type":"ContainerStarted","Data":"afadebbd640434a996a5a893e437e307dfc813dbccdae4ea361bb3cb6e790178"} Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.875540 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp" Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.891019 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp" Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.897851 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-p5sq6" event={"ID":"99ae132d-a196-46f2-aae4-910e03935ee9","Type":"ContainerStarted","Data":"a2f1a668473da708912b1fe10bc86f8625648e8955acd7dacfb3a45c9d08bbdc"} Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.912808 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tjf8f" event={"ID":"d4183796-cb3c-4979-8336-9e15ba18f37a","Type":"ContainerStarted","Data":"7564a08b2966ef3977c207550051c16c15c20efbc2aae441fda891538484bcbc"} Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.959320 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:49 crc kubenswrapper[4711]: E0123 08:22:49.959423 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:50.459399087 +0000 UTC m=+156.032355455 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.959749 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:49 crc kubenswrapper[4711]: E0123 08:22:49.961458 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:50.461449438 +0000 UTC m=+156.034405806 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.972166 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-v2m5p" event={"ID":"88908809-4dd9-4e62-9c5b-1bf8b3cfdaed","Type":"ContainerStarted","Data":"1ae56b9eece15f5bc21adb71152b58a9abe0c7d3f1e517ca6b9254eb9cdeea7e"} Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.985855 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fzkjs" event={"ID":"9afbdb97-d93a-494f-8ad0-23179afbee6d","Type":"ContainerStarted","Data":"b2df594a13d85c2bd5a186465cd199c0b2fab247fa03b0b6d9335970b17179b6"} Jan 23 08:22:49 crc kubenswrapper[4711]: I0123 08:22:49.993089 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-f76vr" podStartSLOduration=133.993066042 podStartE2EDuration="2m13.993066042s" podCreationTimestamp="2026-01-23 08:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:49.991898182 +0000 UTC m=+155.564854550" watchObservedRunningTime="2026-01-23 08:22:49.993066042 +0000 UTC m=+155.566022420" Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.023734 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l" event={"ID":"be88d29a-82f0-448c-bad5-545df560740b","Type":"ContainerStarted","Data":"85de44d3c99d1c68a6de8a6065e3059955743e18cb0c873162b2cc4c599017fb"} Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.065722 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:50 crc kubenswrapper[4711]: E0123 08:22:50.067091 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:50.567069848 +0000 UTC m=+156.140026226 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.135261 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-sk8zj" event={"ID":"a7e00bfd-844d-4264-aff6-d2bdb6673084","Type":"ContainerStarted","Data":"9598aea1389a4771d9f37145375b3dd00159e2c7f558df2c808551163d20fc56"} Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.135308 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-sk8zj" event={"ID":"a7e00bfd-844d-4264-aff6-d2bdb6673084","Type":"ContainerStarted","Data":"01efb0ced0d4fb94c7b6118295641b3d846760da66ddf13b1154229b1bd7afa0"} Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.163620 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-f76vr" Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.176545 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:50 crc kubenswrapper[4711]: E0123 08:22:50.225638 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:50.725616425 +0000 UTC m=+156.298572793 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.278601 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:50 crc kubenswrapper[4711]: E0123 08:22:50.278987 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:50.778960433 +0000 UTC m=+156.351916801 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.380444 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:50 crc kubenswrapper[4711]: E0123 08:22:50.381266 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:50.881247039 +0000 UTC m=+156.454203407 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.483586 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:50 crc kubenswrapper[4711]: E0123 08:22:50.484029 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:50.983812751 +0000 UTC m=+156.556769119 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.579639 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-01-23 08:17:49 +0000 UTC, rotation deadline is 2026-11-21 20:42:08.201343357 +0000 UTC Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.580051 4711 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 7260h19m17.621296601s for next certificate rotation Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.589937 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:50 crc kubenswrapper[4711]: E0123 08:22:50.590428 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:51.090411725 +0000 UTC m=+156.663368093 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.691750 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:50 crc kubenswrapper[4711]: E0123 08:22:50.693322 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:51.193306887 +0000 UTC m=+156.766263255 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.785441 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp" podStartSLOduration=133.785417778 podStartE2EDuration="2m13.785417778s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:50.78388761 +0000 UTC m=+156.356843978" watchObservedRunningTime="2026-01-23 08:22:50.785417778 +0000 UTC m=+156.358374146" Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.793238 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:50 crc kubenswrapper[4711]: E0123 08:22:50.793654 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:51.293641354 +0000 UTC m=+156.866597722 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.895114 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:50 crc kubenswrapper[4711]: E0123 08:22:50.895639 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:51.395606292 +0000 UTC m=+156.968562660 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.902940 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:50 crc kubenswrapper[4711]: E0123 08:22:50.903451 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:51.403435398 +0000 UTC m=+156.976391766 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.931147 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cssg6"] Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.932530 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-6rpqs" podStartSLOduration=134.932492118 podStartE2EDuration="2m14.932492118s" podCreationTimestamp="2026-01-23 08:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:50.927376629 +0000 UTC m=+156.500332997" watchObservedRunningTime="2026-01-23 08:22:50.932492118 +0000 UTC m=+156.505448486" Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.951248 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-zmvtv"] Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.952927 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tjf8f" podStartSLOduration=134.952907509 podStartE2EDuration="2m14.952907509s" podCreationTimestamp="2026-01-23 08:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:50.943131814 +0000 UTC m=+156.516088182" watchObservedRunningTime="2026-01-23 08:22:50.952907509 +0000 UTC m=+156.525863877" Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.983808 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9wx2k"] Jan 23 08:22:50 crc kubenswrapper[4711]: I0123 08:22:50.991203 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-fzkjs" podStartSLOduration=133.991165889 podStartE2EDuration="2m13.991165889s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:50.981649281 +0000 UTC m=+156.554605649" watchObservedRunningTime="2026-01-23 08:22:50.991165889 +0000 UTC m=+156.564122257" Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.004787 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:51 crc kubenswrapper[4711]: E0123 08:22:51.005270 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-23 08:22:51.505248383 +0000 UTC m=+157.078204751 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.016710 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q"] Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.030858 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7" podStartSLOduration=134.030831194 podStartE2EDuration="2m14.030831194s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:51.009604332 +0000 UTC m=+156.582560700" watchObservedRunningTime="2026-01-23 08:22:51.030831194 +0000 UTC m=+156.603787572" Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.034990 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc"] Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.106163 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:51 crc kubenswrapper[4711]: E0123 08:22:51.106594 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:51.606579954 +0000 UTC m=+157.179536322 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.144671 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l" podStartSLOduration=134.144649879 podStartE2EDuration="2m14.144649879s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:51.143369807 +0000 UTC m=+156.716326175" watchObservedRunningTime="2026-01-23 08:22:51.144649879 +0000 UTC m=+156.717606247" Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.145442 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-sk8zj" podStartSLOduration=135.145438319 podStartE2EDuration="2m15.145438319s" podCreationTimestamp="2026-01-23 08:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:51.056114898 +0000 UTC m=+156.629071266" watchObservedRunningTime="2026-01-23 08:22:51.145438319 +0000 UTC m=+156.718394687" Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.180699 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw7wx" event={"ID":"ce0cb782-5b0e-4dfd-8c39-8ad6d6787d58","Type":"ContainerStarted","Data":"b35c5a82a3f763dc22085d95f5ada3af52e6b55e7e9770ea1d65943f5a6b5219"} Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.180748 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw7wx" event={"ID":"ce0cb782-5b0e-4dfd-8c39-8ad6d6787d58","Type":"ContainerStarted","Data":"a6553486c7e9671a80e11c354785e7802bf6adbe2d7bf3f1897d9390dc3207c1"} Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.209655 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.210500 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-vrl4r" event={"ID":"98a4475c-0ffd-49e8-8cd1-e5636ccb174f","Type":"ContainerStarted","Data":"1f63c4e06df57320b662eebc86351e8c624e9c0bf977900fd18cacd05cd17212"} Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.216219 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-z4ppj" event={"ID":"a44c144e-22c9-44be-88bd-408c82ed0e0a","Type":"ContainerStarted","Data":"951d6d6d8a0a89a4328e1c67a24c086c56e3136f62f98e4073ccd805e30964c3"} Jan 23 08:22:51 crc kubenswrapper[4711]: E0123 08:22:51.283365 4711 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:51.783208575 +0000 UTC m=+157.356164943 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.295658 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fzkjs" event={"ID":"9afbdb97-d93a-494f-8ad0-23179afbee6d","Type":"ContainerStarted","Data":"b807e7cca6f910a332daf453003dada10594c56e155a6f0067a1d2d87fcf8cda"} Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.297478 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zmvtv" event={"ID":"fde6fa25-f831-4858-96d5-c549d889c4c9","Type":"ContainerStarted","Data":"c2c04f0b219cc3279b6f92679f6cc5aa211a8a6908c83fa549b2a30af10b535b"} Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.308218 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nrm2j" event={"ID":"341a7457-1e1c-4f8c-81b8-850161798640","Type":"ContainerStarted","Data":"8145634764b7cc256617cec03bbf88664ed6e985f015c4f7a9da154de112c34c"} Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.308292 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nrm2j" event={"ID":"341a7457-1e1c-4f8c-81b8-850161798640","Type":"ContainerStarted","Data":"dd932844c8f728caef47a163f5a9eb1bcd2702fa94944a9ab36fd5d0eb258b3f"} Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.315207 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:51 crc kubenswrapper[4711]: E0123 08:22:51.316848 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:51.816827948 +0000 UTC m=+157.389784326 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.324384 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-zw7wx" podStartSLOduration=134.324363618 podStartE2EDuration="2m14.324363618s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:51.239779426 +0000 UTC m=+156.812735794" watchObservedRunningTime="2026-01-23 08:22:51.324363618 +0000 UTC m=+156.897319986" Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.324525 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-vrl4r" podStartSLOduration=5.324522232 podStartE2EDuration="5.324522232s" podCreationTimestamp="2026-01-23 08:22:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:51.322633684 +0000 UTC m=+156.895590282" watchObservedRunningTime="2026-01-23 08:22:51.324522232 +0000 UTC m=+156.897478600" Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.326043 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" event={"ID":"3578bb73-066e-43b9-85db-a5989823d8d1","Type":"ContainerStarted","Data":"f8d521a5bdd4f8b0c36b7550e6260614f087176196731c3a6fafd1c8dd600ade"} Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.340321 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-v2m5p" event={"ID":"88908809-4dd9-4e62-9c5b-1bf8b3cfdaed","Type":"ContainerStarted","Data":"ef5c4c1225c457619b762c901a022cc870e81159e2996026b112ed5163d603e9"} Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.341496 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cssg6" event={"ID":"ccd74590-a19d-4c99-bf60-59f66e85a484","Type":"ContainerStarted","Data":"b33343b152aacdf38a5616b59b87a1e14184a32c7d61fb1b597a012ed6a961e3"} Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.343464 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-gxhxb" event={"ID":"9b990ad9-6046-4ef0-bf53-1a5a74c9d0d8","Type":"ContainerStarted","Data":"a8df9a46d2bb10e9b0b0da865eee3e2c898218508c403cabd37cabffccac26f4"} Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.343544 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-gxhxb" Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.346869 4711 patch_prober.go:28] interesting pod/downloads-7954f5f757-gxhxb container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.24:8080/\": dial tcp 10.217.0.24:8080: connect: connection refused" start-of-body= Jan 23 08:22:51 
crc kubenswrapper[4711]: I0123 08:22:51.346919 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-gxhxb" podUID="9b990ad9-6046-4ef0-bf53-1a5a74c9d0d8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.24:8080/\": dial tcp 10.217.0.24:8080: connect: connection refused" Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.367877 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-p5sq6" event={"ID":"99ae132d-a196-46f2-aae4-910e03935ee9","Type":"ContainerStarted","Data":"1e4db72e17c330a886c3d3df009247b60c34e26c6535d936ea1dc51d61e89a4a"} Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.375702 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-z4ppj" podStartSLOduration=134.375681465 podStartE2EDuration="2m14.375681465s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:51.374904306 +0000 UTC m=+156.947860674" watchObservedRunningTime="2026-01-23 08:22:51.375681465 +0000 UTC m=+156.948637833" Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.385829 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" event={"ID":"be1b64cc-b8d5-429c-8189-542268f1d7a2","Type":"ContainerStarted","Data":"388265ffbae000e3a9f89152332a84b17c6f8c33d9cadaa6b5940e02ab9e5b9f"} Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.385886 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.414331 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-v2m5p" podStartSLOduration=134.414312254 podStartE2EDuration="2m14.414312254s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:51.411834012 +0000 UTC m=+156.984790380" watchObservedRunningTime="2026-01-23 08:22:51.414312254 +0000 UTC m=+156.987268622" Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.421986 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:51 crc kubenswrapper[4711]: E0123 08:22:51.423629 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:51.923607187 +0000 UTC m=+157.496563555 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.499218 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-p5sq6" podStartSLOduration=134.499184812 podStartE2EDuration="2m14.499184812s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:51.498297091 +0000 UTC m=+157.071253459" watchObservedRunningTime="2026-01-23 08:22:51.499184812 +0000 UTC m=+157.072141190"
Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.500762 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-gxhxb" podStartSLOduration=135.500752543 podStartE2EDuration="2m15.500752543s" podCreationTimestamp="2026-01-23 08:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:51.463068257 +0000 UTC m=+157.036024625" watchObservedRunningTime="2026-01-23 08:22:51.500752543 +0000 UTC m=+157.073708911"
Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.524770 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.534480 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nrm2j" podStartSLOduration=135.534453078 podStartE2EDuration="2m15.534453078s" podCreationTimestamp="2026-01-23 08:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:51.520771645 +0000 UTC m=+157.093728013" watchObservedRunningTime="2026-01-23 08:22:51.534453078 +0000 UTC m=+157.107409436"
Jan 23 08:22:51 crc kubenswrapper[4711]: E0123 08:22:51.549920 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:52.049874855 +0000 UTC m=+157.622831223 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.551494 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" podStartSLOduration=135.551467005 podStartE2EDuration="2m15.551467005s" podCreationTimestamp="2026-01-23 08:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:51.548957151 +0000 UTC m=+157.121913529" watchObservedRunningTime="2026-01-23 08:22:51.551467005 +0000 UTC m=+157.124423383"
Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.629412 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:51 crc kubenswrapper[4711]: E0123 08:22:51.630259 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:52.13023804 +0000 UTC m=+157.703194408 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.736356 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:51 crc kubenswrapper[4711]: E0123 08:22:51.736806 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:52.236784663 +0000 UTC m=+157.809741031 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.837458 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:51 crc kubenswrapper[4711]: E0123 08:22:51.837947 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:52.33792654 +0000 UTC m=+157.910882908 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.869329 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-v2m5p"
Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.870873 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkrcc"]
Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.882717 4711 patch_prober.go:28] interesting pod/router-default-5444994796-v2m5p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 23 08:22:51 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld
Jan 23 08:22:51 crc kubenswrapper[4711]: [+]process-running ok
Jan 23 08:22:51 crc kubenswrapper[4711]: healthz check failed
Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.883207 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-v2m5p" podUID="88908809-4dd9-4e62-9c5b-1bf8b3cfdaed" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 23 08:22:51 crc kubenswrapper[4711]: I0123 08:22:51.943893 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:51 crc kubenswrapper[4711]: E0123 08:22:51.944334 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:52.444315989 +0000 UTC m=+158.017272357 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.019267 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq"
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.045967 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:52 crc kubenswrapper[4711]: E0123 08:22:52.046150 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:52.546121433 +0000 UTC m=+158.119077801 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.046253 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:52 crc kubenswrapper[4711]: E0123 08:22:52.046714 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:52.546704138 +0000 UTC m=+158.119660516 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.059648 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5mwhk"]
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.059729 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n"]
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.074586 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-zg5c9"]
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.153028 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:52 crc kubenswrapper[4711]: E0123 08:22:52.205584 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:52.705543322 +0000 UTC m=+158.278499690 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.308380 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:52 crc kubenswrapper[4711]: E0123 08:22:52.308804 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:52.808788532 +0000 UTC m=+158.381744900 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.369819 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-hn45s"]
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.409411 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:52 crc kubenswrapper[4711]: E0123 08:22:52.409929 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:52.909902439 +0000 UTC m=+158.482858817 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.398487 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xcw55"]
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.458902 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zmvtv" event={"ID":"fde6fa25-f831-4858-96d5-c549d889c4c9","Type":"ContainerStarted","Data":"7646af647cdc17d9f6fd93a845f920414827678bdf580a791d985ba163ba7dca"}
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.460265 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cssg6" event={"ID":"ccd74590-a19d-4c99-bf60-59f66e85a484","Type":"ContainerStarted","Data":"c937e817c136e12dc56014f282b2bd9388800a4dbc3726ff8710b5102b8c4ceb"}
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.470224 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5mwhk" event={"ID":"fd4bd624-70bb-4602-a4f3-6824c59f90a4","Type":"ContainerStarted","Data":"384d2117920ce3cb3297b1a22f094c041e1dee0176298cfe5d875c923d61c61a"}
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.483814 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q" event={"ID":"df27a7ac-56ad-458a-8954-4177f65db5ac","Type":"ContainerStarted","Data":"a136827c674b987ae4570b55bafef113e7a549f43c5563e5a17ff06c53f8bc26"}
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.483901 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q" event={"ID":"df27a7ac-56ad-458a-8954-4177f65db5ac","Type":"ContainerStarted","Data":"36b80e654f792a7f9a9ccbcfb699a46b5c19eb68c891973685869a82dca66271"}
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.492049 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l"
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.492475 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l"
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.502854 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cssg6" podStartSLOduration=136.502828149 podStartE2EDuration="2m16.502828149s" podCreationTimestamp="2026-01-23 08:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:52.491809083 +0000 UTC m=+158.064765451" watchObservedRunningTime="2026-01-23 08:22:52.502828149 +0000 UTC m=+158.075784517"
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.513345 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:52 crc kubenswrapper[4711]: E0123 08:22:52.513889 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:53.013868936 +0000 UTC m=+158.586825304 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.514568 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc" event={"ID":"69261199-17d1-4122-ad29-ef7417a0f25e","Type":"ContainerStarted","Data":"06388349c28ae6560a082bee63a62229f32447f3f8bb4810102344f523a7ba9c"}
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.514612 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc" event={"ID":"69261199-17d1-4122-ad29-ef7417a0f25e","Type":"ContainerStarted","Data":"d5e015c927f4730c81028ddc739375c1d0ed5687ef89efbf2d3274b45c874a1c"}
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.515289 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc"
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.523710 4711 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-z2ndc container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body=
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.523794 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc" podUID="69261199-17d1-4122-ad29-ef7417a0f25e" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused"
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.524102 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkrcc" event={"ID":"0c1a3ec4-2523-4339-a252-99b7d88c8c93","Type":"ContainerStarted","Data":"4e359d7fa3884d8cf4383815ade33d5a3af083aa00173a1f54e8be799fa19b5c"}
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.540978 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n" event={"ID":"238d099f-eb44-4b83-a996-647f7adad7d1","Type":"ContainerStarted","Data":"1c25d558f56bea53831af317acaeaf2970d7c99be071ec1983b4ec72f8d15010"}
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.560193 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" event={"ID":"3578bb73-066e-43b9-85db-a5989823d8d1","Type":"ContainerStarted","Data":"e9a3a259347ea535828b1238d2d0b88a0d758937ccd495fe138789d7c402b524"}
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.573028 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9wx2k" event={"ID":"d662cd8f-d996-413a-89c8-559898662622","Type":"ContainerStarted","Data":"f1614fbdf0da929fd5d289eb6baddd07bb3179fa29ef7d97ec391011d26fed92"}
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.573072 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9wx2k" event={"ID":"d662cd8f-d996-413a-89c8-559898662622","Type":"ContainerStarted","Data":"43e46dffebad0c26325ca3386ebb657facaadad90fa4d295ff8582942af77995"}
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.587733 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nrm2j" event={"ID":"341a7457-1e1c-4f8c-81b8-850161798640","Type":"ContainerStarted","Data":"8d71e29c2e93dc40d47092a5b697f19dd9446ed6cd68ca8d89de1eaad0927d75"}
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.592614 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" event={"ID":"82d719a5-f69c-4ffa-8bab-5c73841665ee","Type":"ContainerStarted","Data":"50b6b7aeb8ac0a1f51cd719d1e1c6384c474a153bbc8222148855ec0ca525144"}
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.594662 4711 patch_prober.go:28] interesting pod/downloads-7954f5f757-gxhxb container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.24:8080/\": dial tcp 10.217.0.24:8080: connect: connection refused" start-of-body=
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.594781 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-gxhxb" podUID="9b990ad9-6046-4ef0-bf53-1a5a74c9d0d8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.24:8080/\": dial tcp 10.217.0.24:8080: connect: connection refused"
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.596337 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc" podStartSLOduration=135.596326785 podStartE2EDuration="2m15.596326785s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:52.593920734 +0000 UTC m=+158.166877102" watchObservedRunningTime="2026-01-23 08:22:52.596326785 +0000 UTC m=+158.169283153"
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.614613 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:52 crc kubenswrapper[4711]: E0123 08:22:52.616054 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:53.116020659 +0000 UTC m=+158.688977027 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.618243 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9wx2k" podStartSLOduration=135.618226445 podStartE2EDuration="2m15.618226445s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:52.617919806 +0000 UTC m=+158.190876184" watchObservedRunningTime="2026-01-23 08:22:52.618226445 +0000 UTC m=+158.191182813"
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.665271 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-7nvn6"
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.665654 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-7nvn6"
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.666633 4711 patch_prober.go:28] interesting pod/apiserver-76f77b778f-7nvn6 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.5:8443/livez\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body=
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.666681 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" podUID="3578bb73-066e-43b9-85db-a5989823d8d1" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.5:8443/livez\": dial tcp 10.217.0.5:8443: connect: connection refused"
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.716817 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:52 crc kubenswrapper[4711]: E0123 08:22:52.718034 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:53.218018938 +0000 UTC m=+158.790975306 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.818222 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:52 crc kubenswrapper[4711]: E0123 08:22:52.818468 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:53.318423507 +0000 UTC m=+158.891379885 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.862914 4711 patch_prober.go:28] interesting pod/router-default-5444994796-v2m5p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 23 08:22:52 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld
Jan 23 08:22:52 crc kubenswrapper[4711]: [+]process-running ok
Jan 23 08:22:52 crc kubenswrapper[4711]: healthz check failed
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.863050 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-v2m5p" podUID="88908809-4dd9-4e62-9c5b-1bf8b3cfdaed" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 23 08:22:52 crc kubenswrapper[4711]: I0123 08:22:52.920207 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:52 crc kubenswrapper[4711]: E0123 08:22:52.920791 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:53.420769714 +0000 UTC m=+158.993726082 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.021031 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:53 crc kubenswrapper[4711]: E0123 08:22:53.021282 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:53.521247895 +0000 UTC m=+159.094204263 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.021631 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:53 crc kubenswrapper[4711]: E0123 08:22:53.022071 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:53.522052894 +0000 UTC m=+159.095009262 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.062308 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" podStartSLOduration=137.062283704 podStartE2EDuration="2m17.062283704s" podCreationTimestamp="2026-01-23 08:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:52.645831576 +0000 UTC m=+158.218787944" watchObservedRunningTime="2026-01-23 08:22:53.062283704 +0000 UTC m=+158.635240072"
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.064787 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-wl6fc"]
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.077063 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l"
Jan 23 08:22:53 crc kubenswrapper[4711]: W0123 08:22:53.082131 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod96dd98a3_fa00_4d1f_ba55_ab03180cff4c.slice/crio-2ce2644b38a03af3fb99a3c968da3f77c51b237ff4e15998bc71e73ba239f75e WatchSource:0}: Error finding container 2ce2644b38a03af3fb99a3c968da3f77c51b237ff4e15998bc71e73ba239f75e: Status 404 returned error can't find the container with id 2ce2644b38a03af3fb99a3c968da3f77c51b237ff4e15998bc71e73ba239f75e
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.083699 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-nd7dc"]
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.103549 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-z87tl"]
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.105799 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-s6xwh"]
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.108045 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-p9lxv"]
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.115846 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-nb2x5"]
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.123082 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:53 crc kubenswrapper[4711]: E0123 08:22:53.123549 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:53.62352236 +0000 UTC m=+159.196478738 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.125133 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rhrsx"]
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.153727 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-7bjqm"]
Jan 23 08:22:53 crc kubenswrapper[4711]: W0123 08:22:53.167445 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod343eb722_17b5_4a43_874c_2dc792355793.slice/crio-015246f52b2021e5279407f04b1ef328342681407a570967a9edb9a7d9a24f34 WatchSource:0}: Error finding container 015246f52b2021e5279407f04b1ef328342681407a570967a9edb9a7d9a24f34: Status 404 returned error can't find the container with id 015246f52b2021e5279407f04b1ef328342681407a570967a9edb9a7d9a24f34
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.169876 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh"]
Jan 23 08:22:53 crc kubenswrapper[4711]: W0123 08:22:53.171087 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f985710_d7f9_4d47_bab6_12cea6e28ae9.slice/crio-626b137234c9e0ee0c99649738b55e3803868a23f63fba8321b437ea1a3debb3 WatchSource:0}: Error finding container 626b137234c9e0ee0c99649738b55e3803868a23f63fba8321b437ea1a3debb3: Status 404 returned error can't find the container with id 626b137234c9e0ee0c99649738b55e3803868a23f63fba8321b437ea1a3debb3
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.184210 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-gtl2r"]
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.221880 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-r9c8g"]
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.226600 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-mrmw6"]
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.226933 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:53 crc kubenswrapper[4711]: E0123 08:22:53.227416 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:53.727382255 +0000 UTC m=+159.300338623 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.330220 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:53 crc kubenswrapper[4711]: E0123 08:22:53.330442 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:53.8304178 +0000 UTC m=+159.403374168 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.330708 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:53 crc kubenswrapper[4711]: E0123 08:22:53.331059 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:53.831050446 +0000 UTC m=+159.404006814 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.431118 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:53 crc kubenswrapper[4711]: E0123 08:22:53.431307 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:53.93127562 +0000 UTC m=+159.504231988 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.431365 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:53 crc kubenswrapper[4711]: E0123 08:22:53.431793 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:53.931778223 +0000 UTC m=+159.504734591 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.532473 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:53 crc kubenswrapper[4711]: E0123 08:22:53.532714 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:54.032674824 +0000 UTC m=+159.605631202 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.599598 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xcw55" event={"ID":"c17e743d-d33d-45c8-a15b-a6759f00838d","Type":"ContainerStarted","Data":"ccb597cd1bdfcbcef376e84a42434d47d7e64bdd40d18099f3cfd6a2704fdeb5"}
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.610185 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-nb2x5" event={"ID":"3537ecbf-d37c-458e-b460-8f72882bfd08","Type":"ContainerStarted","Data":"6c01e82e9c135bc1eac045b907b22c8eb37658d939ccf125642c95d59f7c82ed"}
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.611240 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-s6xwh" event={"ID":"9f985710-d7f9-4d47-bab6-12cea6e28ae9","Type":"ContainerStarted","Data":"626b137234c9e0ee0c99649738b55e3803868a23f63fba8321b437ea1a3debb3"}
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.613135 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh" event={"ID":"e1ab258d-ba2f-434d-8a15-2a08f24d03cb","Type":"ContainerStarted","Data":"d0260664dd01a6585928594c698c8cd2d5ded01f9b9fe8436eaa3105184dc63d"}
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.633716 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:53 crc kubenswrapper[4711]: E0123 08:22:53.634125 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:54.134107238 +0000 UTC m=+159.707063606 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.634468 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-r9c8g" event={"ID":"ce996e23-feba-490d-9e1b-1ae772ed7886","Type":"ContainerStarted","Data":"0c74faea38e986c100c71b3add3e87a9ddbe05f146a00570836a2d4e9fc3bcea"}
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.639992 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mrmw6" event={"ID":"f34584c2-05c1-4311-9168-fb7fb86976d9","Type":"ContainerStarted","Data":"bc59115d98f848fd70d1df827dbb25054eca586f2d3fd1e1e737b2b2734faca9"}
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.640995 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-z87tl" event={"ID":"ed15b7dc-3a50-412e-a239-ed37611684a8","Type":"ContainerStarted","Data":"7853f72805aeb4593e88a31f4abc487a6790d8d92d557010555e02e3359df0de"}
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.643918 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-7bjqm" event={"ID":"7c9cf2f9-5ba9-4670-9240-25b3d24104c4","Type":"ContainerStarted","Data":"427e83acdc253550a2090da4f26e6bad2aa6fde402d412793264f2d6705c7b11"}
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.644654 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-gtl2r" event={"ID":"0915c58a-b29f-4de3-b5a0-e9985a2fd699","Type":"ContainerStarted","Data":"e21827a33e85598320c64494383be26e5f2d5c56507d5511fa47d82ba1746416"}
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.670833 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx" event={"ID":"8b4e79a6-ff8b-4293-931b-bde9f25b7576","Type":"ContainerStarted","Data":"ffa90f8803ff0f5a8c74c6101f11246783a21fb7d64001c90dfa91a3b71a7834"}
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.677132 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p9lxv" event={"ID":"343eb722-17b5-4a43-874c-2dc792355793","Type":"ContainerStarted","Data":"015246f52b2021e5279407f04b1ef328342681407a570967a9edb9a7d9a24f34"}
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.679325 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-wl6fc" event={"ID":"96dd98a3-fa00-4d1f-ba55-ab03180cff4c","Type":"ContainerStarted","Data":"2ce2644b38a03af3fb99a3c968da3f77c51b237ff4e15998bc71e73ba239f75e"}
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.680372 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-nd7dc" event={"ID":"0bc67a26-61e7-42bd-b765-2c992886ac63","Type":"ContainerStarted","Data":"94d0f0645b401ea6a99b6e9d445a319e2df3678065246f324188edd02ee18d01"}
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.682347 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-hn45s" event={"ID":"8d219b79-0bde-4bac-ae6e-2e39a766d0e0","Type":"ContainerStarted","Data":"251fdcd86d65a2d8b0b4a97122337fac8e98c019d70d2b5f00b0bf7e975448a6"}
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.684827 4711 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-z2ndc container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body=
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.684873 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc" podUID="69261199-17d1-4122-ad29-ef7417a0f25e" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused"
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.713064 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ppc4l"
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.735306 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:53 crc kubenswrapper[4711]: E0123 08:22:53.735715 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:54.235693927 +0000 UTC m=+159.808650295 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.837055 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:53 crc kubenswrapper[4711]: E0123 08:22:53.841106 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:54.34108515 +0000 UTC m=+159.914041518 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.865834 4711 patch_prober.go:28] interesting pod/router-default-5444994796-v2m5p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 23 08:22:53 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld
Jan 23 08:22:53 crc kubenswrapper[4711]: [+]process-running ok
Jan 23 08:22:53 crc kubenswrapper[4711]: healthz check failed
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.865902 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-v2m5p" podUID="88908809-4dd9-4e62-9c5b-1bf8b3cfdaed" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 23 08:22:53 crc kubenswrapper[4711]: I0123 08:22:53.955049 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:53 crc kubenswrapper[4711]: E0123 08:22:53.955854 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:54.455825649 +0000 UTC m=+160.028782017 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.056691 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:54 crc kubenswrapper[4711]: E0123 08:22:54.057069 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:54.557048247 +0000 UTC m=+160.130004615 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.158168 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:54 crc kubenswrapper[4711]: E0123 08:22:54.158352 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:54.658328609 +0000 UTC m=+160.231284977 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.158423 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:54 crc kubenswrapper[4711]: E0123 08:22:54.158870 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:54.658856952 +0000 UTC m=+160.231813320 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.259761 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:54 crc kubenswrapper[4711]: E0123 08:22:54.260231 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:54.760208294 +0000 UTC m=+160.333164662 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.363816 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:54 crc kubenswrapper[4711]: E0123 08:22:54.364236 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:54.864209583 +0000 UTC m=+160.437165951 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.464795 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:54 crc kubenswrapper[4711]: E0123 08:22:54.465159 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:54.965136425 +0000 UTC m=+160.538092793 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.566563 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:54 crc kubenswrapper[4711]: E0123 08:22:54.567080 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:55.067058872 +0000 UTC m=+160.640015230 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.675918 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:54 crc kubenswrapper[4711]: E0123 08:22:54.676467 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:55.176446946 +0000 UTC m=+160.749403314 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.741417 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkrcc" event={"ID":"0c1a3ec4-2523-4339-a252-99b7d88c8c93","Type":"ContainerStarted","Data":"c71d9c6e3209844b3636bd0dcc0ab2c2d91446ce69a3e617947d9ac4cd5082e8"} Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.748363 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" event={"ID":"82d719a5-f69c-4ffa-8bab-5c73841665ee","Type":"ContainerStarted","Data":"001b23e9c124eeb0f21905d98912cc7d72bd455ba2b3b9f7903db594aa4fcf11"} Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.751702 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-hn45s" event={"ID":"8d219b79-0bde-4bac-ae6e-2e39a766d0e0","Type":"ContainerStarted","Data":"1f3cdf4925deab5148aa14e35d1004cef66d903bd6296143a3bf53daef06f06e"} Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.753250 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx" event={"ID":"8b4e79a6-ff8b-4293-931b-bde9f25b7576","Type":"ContainerStarted","Data":"f747b0088ff8904abc130442a0b31eb0267e13f212d15cd048f15a78b8251ff2"} Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.754186 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx" Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.757525 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5mwhk" 
event={"ID":"fd4bd624-70bb-4602-a4f3-6824c59f90a4","Type":"ContainerStarted","Data":"524d32c2e293eb8e59a73d3f2058ae1004f5749054ee2dce8cd53ad41d846137"} Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.759539 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-nb2x5" event={"ID":"3537ecbf-d37c-458e-b460-8f72882bfd08","Type":"ContainerStarted","Data":"3b3d01fb30eb5970ad625d316a5e083599e547ae83f6bcc6fd70276d0c277ae5"} Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.762437 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-z87tl" event={"ID":"ed15b7dc-3a50-412e-a239-ed37611684a8","Type":"ContainerStarted","Data":"e53fc0a86115e772753f8b8cd32070b6ad255688356d80e6a34ce111f764b416"} Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.763221 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-z87tl" Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.764532 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mrmw6" event={"ID":"f34584c2-05c1-4311-9168-fb7fb86976d9","Type":"ContainerStarted","Data":"ffecb64693963db6a53a63ceb2b481234f585d66ff2127b1f1b6c321ce13f53b"} Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.768409 4711 generic.go:334] "Generic (PLEG): container finished" podID="df27a7ac-56ad-458a-8954-4177f65db5ac" containerID="a136827c674b987ae4570b55bafef113e7a549f43c5563e5a17ff06c53f8bc26" exitCode=0 Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.768456 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q" event={"ID":"df27a7ac-56ad-458a-8954-4177f65db5ac","Type":"ContainerDied","Data":"a136827c674b987ae4570b55bafef113e7a549f43c5563e5a17ff06c53f8bc26"} Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.772396 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-wl6fc" event={"ID":"96dd98a3-fa00-4d1f-ba55-ab03180cff4c","Type":"ContainerStarted","Data":"ef1269e151209764b93101d0b6935d7df5b548ebcdd97e697a126c9da560b95c"} Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.777642 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xcw55" event={"ID":"c17e743d-d33d-45c8-a15b-a6759f00838d","Type":"ContainerStarted","Data":"ca40d9aab10824a24844e053c125691041bed5a6f37d2451b5373b895d121225"} Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.779199 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:54 crc kubenswrapper[4711]: E0123 08:22:54.779644 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:55.279623943 +0000 UTC m=+160.852580311 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.786317 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n" event={"ID":"238d099f-eb44-4b83-a996-647f7adad7d1","Type":"ContainerStarted","Data":"1bfdd10ddb3dae22bc4da65aba17464c416a62e7477436a5cf635bbbd3f12ba2"} Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.787617 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n" Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.791186 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-s6xwh" event={"ID":"9f985710-d7f9-4d47-bab6-12cea6e28ae9","Type":"ContainerStarted","Data":"164f8c6a2f129c98888f7dc1e93a27ec96c17b01717870057949806cfdf3fa9d"} Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.800350 4711 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-rhrsx container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.800381 4711 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-z87tl container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" start-of-body= Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.800420 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx" podUID="8b4e79a6-ff8b-4293-931b-bde9f25b7576" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.800462 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-z87tl" podUID="ed15b7dc-3a50-412e-a239-ed37611684a8" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.800636 4711 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-cw84n container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.20:5443/healthz\": dial tcp 10.217.0.20:5443: connect: connection refused" start-of-body= Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.800656 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n" podUID="238d099f-eb44-4b83-a996-647f7adad7d1" containerName="packageserver" probeResult="failure" output="Get 
\"https://10.217.0.20:5443/healthz\": dial tcp 10.217.0.20:5443: connect: connection refused" Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.801326 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh" event={"ID":"e1ab258d-ba2f-434d-8a15-2a08f24d03cb","Type":"ContainerStarted","Data":"806b801deab8860adaf602e8a412d238e40516c62c604d434f2ad5d861286a96"} Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.812880 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-7bjqm" event={"ID":"7c9cf2f9-5ba9-4670-9240-25b3d24104c4","Type":"ContainerStarted","Data":"efa85e5b03e50112ba788a248ddefa9414df61d8c47d07f126a8f304964e2374"} Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.821219 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-zg5c9" podStartSLOduration=137.821197276 podStartE2EDuration="2m17.821197276s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:54.794329333 +0000 UTC m=+160.367285701" watchObservedRunningTime="2026-01-23 08:22:54.821197276 +0000 UTC m=+160.394153644" Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.877012 4711 patch_prober.go:28] interesting pod/router-default-5444994796-v2m5p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 23 08:22:54 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld Jan 23 08:22:54 crc kubenswrapper[4711]: [+]process-running ok Jan 23 08:22:54 crc kubenswrapper[4711]: healthz check failed Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.878377 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-v2m5p" podUID="88908809-4dd9-4e62-9c5b-1bf8b3cfdaed" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.877339 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-gtl2r" event={"ID":"0915c58a-b29f-4de3-b5a0-e9985a2fd699","Type":"ContainerStarted","Data":"e90f459e7e477164a623ab55c2779e23c39737f3be6d51e216bbfccb569d5277"} Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.890196 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:54 crc kubenswrapper[4711]: E0123 08:22:54.891192 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:55.391143991 +0000 UTC m=+160.964100359 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.891399 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:54 crc kubenswrapper[4711]: E0123 08:22:54.900110 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:55.400070445 +0000 UTC m=+160.973026813 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.918341 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p9lxv" event={"ID":"343eb722-17b5-4a43-874c-2dc792355793","Type":"ContainerStarted","Data":"58772ac8ae9509dafad88866c21670a209b249d2e76bc112df126971dd7f7854"} Jan 23 08:22:54 crc kubenswrapper[4711]: I0123 08:22:54.945257 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-nd7dc" event={"ID":"0bc67a26-61e7-42bd-b765-2c992886ac63","Type":"ContainerStarted","Data":"1a59ac62b3c6700ffa6ffffbd32051c9b66e818d60142353e2ec62709ba4a0a6"} Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.008448 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:55 crc kubenswrapper[4711]: E0123 08:22:55.009734 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:55.509701595 +0000 UTC m=+161.082658033 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.046889 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5mwhk" podStartSLOduration=138.046860397 podStartE2EDuration="2m18.046860397s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:54.939211507 +0000 UTC m=+160.512167885" watchObservedRunningTime="2026-01-23 08:22:55.046860397 +0000 UTC m=+160.619816755" Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.049047 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-wl6fc" podStartSLOduration=10.049033672 podStartE2EDuration="10.049033672s" podCreationTimestamp="2026-01-23 08:22:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:55.044792106 +0000 UTC m=+160.617748484" watchObservedRunningTime="2026-01-23 08:22:55.049033672 +0000 UTC m=+160.621990050" Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.055798 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zmvtv" event={"ID":"fde6fa25-f831-4858-96d5-c549d889c4c9","Type":"ContainerStarted","Data":"e5ec79923e32d1738aeae3f0fca57640baf4251f4644ef347b94795bffdbbd98"} Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.083937 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-z2ndc" Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.135368 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-z87tl" podStartSLOduration=138.135335266 podStartE2EDuration="2m18.135335266s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:55.084612274 +0000 UTC m=+160.657568652" watchObservedRunningTime="2026-01-23 08:22:55.135335266 +0000 UTC m=+160.708291634" Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.136677 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:55 crc kubenswrapper[4711]: E0123 08:22:55.137185 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-01-23 08:22:55.637165773 +0000 UTC m=+161.210122141 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.238388 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:55 crc kubenswrapper[4711]: E0123 08:22:55.238713 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:55.738677489 +0000 UTC m=+161.311633847 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.238989 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:55 crc kubenswrapper[4711]: E0123 08:22:55.240784 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:55.740767942 +0000 UTC m=+161.313724310 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.262243 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xcw55" podStartSLOduration=138.262215939 podStartE2EDuration="2m18.262215939s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:55.261748668 +0000 UTC m=+160.834705036" watchObservedRunningTime="2026-01-23 08:22:55.262215939 +0000 UTC m=+160.835172307" Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.263282 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx" podStartSLOduration=138.263276996 podStartE2EDuration="2m18.263276996s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:55.213254711 +0000 UTC m=+160.786211079" watchObservedRunningTime="2026-01-23 08:22:55.263276996 +0000 UTC m=+160.836233364" Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.329135 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n" podStartSLOduration=138.329112198 podStartE2EDuration="2m18.329112198s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:55.32598781 +0000 UTC m=+160.898944178" watchObservedRunningTime="2026-01-23 08:22:55.329112198 +0000 UTC m=+160.902068576" Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.341651 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:55 crc kubenswrapper[4711]: E0123 08:22:55.341936 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:55.841915229 +0000 UTC m=+161.414871597 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.381145 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zmvtv" podStartSLOduration=138.381126142 podStartE2EDuration="2m18.381126142s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:55.38062661 +0000 UTC m=+160.953582988" watchObservedRunningTime="2026-01-23 08:22:55.381126142 +0000 UTC m=+160.954082510" Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.413071 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-gtl2r" podStartSLOduration=138.413048103 podStartE2EDuration="2m18.413048103s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:55.411616278 +0000 UTC m=+160.984572646" watchObservedRunningTime="2026-01-23 08:22:55.413048103 +0000 UTC m=+160.986004471" Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.445316 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:55 crc kubenswrapper[4711]: E0123 08:22:55.445994 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:55.945974709 +0000 UTC m=+161.518931077 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.502156 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-7bjqm" podStartSLOduration=138.502128277 podStartE2EDuration="2m18.502128277s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:55.500609199 +0000 UTC m=+161.073565557" watchObservedRunningTime="2026-01-23 08:22:55.502128277 +0000 UTC m=+161.075084645" Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.543839 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh" podStartSLOduration=139.543813923 podStartE2EDuration="2m19.543813923s" podCreationTimestamp="2026-01-23 08:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:55.54287079 +0000 UTC m=+161.115827158" watchObservedRunningTime="2026-01-23 08:22:55.543813923 +0000 UTC m=+161.116770291" Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.547391 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:55 crc kubenswrapper[4711]: E0123 08:22:55.547737 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:56.047694411 +0000 UTC m=+161.620650789 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.554812 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:55 crc kubenswrapper[4711]: E0123 08:22:55.555562 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:56.055537708 +0000 UTC m=+161.628494076 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.657449 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:55 crc kubenswrapper[4711]: E0123 08:22:55.657956 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:56.157930486 +0000 UTC m=+161.730886854 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.762731 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:55 crc kubenswrapper[4711]: E0123 08:22:55.763290 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:56.263274229 +0000 UTC m=+161.836230597 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.865106 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:55 crc kubenswrapper[4711]: E0123 08:22:55.865466 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:56.365444712 +0000 UTC m=+161.938401080 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.875706 4711 patch_prober.go:28] interesting pod/router-default-5444994796-v2m5p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 23 08:22:55 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld Jan 23 08:22:55 crc kubenswrapper[4711]: [+]process-running ok Jan 23 08:22:55 crc kubenswrapper[4711]: healthz check failed Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.875766 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-v2m5p" podUID="88908809-4dd9-4e62-9c5b-1bf8b3cfdaed" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.979203 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:55 crc kubenswrapper[4711]: E0123 08:22:55.979995 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:56.479975315 +0000 UTC m=+162.052931683 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.998119 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:22:55 crc kubenswrapper[4711]: I0123 08:22:55.998211 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.082402 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:56 crc kubenswrapper[4711]: E0123 08:22:56.082905 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:56.582883996 +0000 UTC m=+162.155840364 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.140389 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkrcc" event={"ID":"0c1a3ec4-2523-4339-a252-99b7d88c8c93","Type":"ContainerStarted","Data":"744c73d721f88957970f2ac3a00909af768ff276c76698f105fa3f6e95f5e625"} Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.141413 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkrcc" Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.155347 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-nb2x5" event={"ID":"3537ecbf-d37c-458e-b460-8f72882bfd08","Type":"ContainerStarted","Data":"b2fd654dfc7b12f4df0ae61491cc43b36992a330bf03f48ce2b87d3fb825c62c"} Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.161396 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-hn45s" event={"ID":"8d219b79-0bde-4bac-ae6e-2e39a766d0e0","Type":"ContainerStarted","Data":"5de95d9a732559398dadd1f5ee9e01f6a4d842cf9dbbfc00ec2f39cd7bd91972"} Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.165245 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkrcc" podStartSLOduration=139.165218121 podStartE2EDuration="2m19.165218121s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:56.16117502 +0000 UTC m=+161.734131388" watchObservedRunningTime="2026-01-23 08:22:56.165218121 +0000 UTC m=+161.738174489" Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.174007 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p9lxv" event={"ID":"343eb722-17b5-4a43-874c-2dc792355793","Type":"ContainerStarted","Data":"77180bcf74fc8fec94ecc443500f60e8b66238ccd0e6c412bcdd885f799c82c5"} Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.177341 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-nd7dc" event={"ID":"0bc67a26-61e7-42bd-b765-2c992886ac63","Type":"ContainerStarted","Data":"fc577db335c4a1e5e27ce390724f67364cb0775d1a1dd165a989b15ed410bcbc"} Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.177778 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-nd7dc" Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.185537 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:56 crc kubenswrapper[4711]: E0123 08:22:56.188989 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:56.688957497 +0000 UTC m=+162.261913865 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.198370 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mrmw6" event={"ID":"f34584c2-05c1-4311-9168-fb7fb86976d9","Type":"ContainerStarted","Data":"74d1223b1f566c9358815d578e8bcf2a3c095e6a8aeda57f3e429db84c76f4bd"} Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.199855 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-nb2x5" podStartSLOduration=139.19983701 podStartE2EDuration="2m19.19983701s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:56.196984199 +0000 UTC m=+161.769940567" watchObservedRunningTime="2026-01-23 08:22:56.19983701 +0000 UTC m=+161.772793378" Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.207074 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-r9c8g" event={"ID":"ce996e23-feba-490d-9e1b-1ae772ed7886","Type":"ContainerStarted","Data":"f4f9eaba1b5c683aca2366d19b2a6eb39d498059854e8dcea8de336bbe2ee7a9"} Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.224556 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q" event={"ID":"df27a7ac-56ad-458a-8954-4177f65db5ac","Type":"ContainerStarted","Data":"13aa5a2e0db602a9955562be599a0da57f18d62ea21d59b686a9f0c21e1903cc"} Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.225210 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q" Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.245820 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-s6xwh" event={"ID":"9f985710-d7f9-4d47-bab6-12cea6e28ae9","Type":"ContainerStarted","Data":"8e5176cd621c6979769a57dd831c9550b4403322e8bfdde8bda341a218ba4ff3"} Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.255651 4711 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-rhrsx container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.255727 4711 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx" podUID="8b4e79a6-ff8b-4293-931b-bde9f25b7576" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.289715 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:56 crc kubenswrapper[4711]: E0123 08:22:56.291224 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:56.791200992 +0000 UTC m=+162.364157360 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.291252 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-z87tl" Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.332297 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-p9lxv" podStartSLOduration=139.332272252 podStartE2EDuration="2m19.332272252s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:56.317984324 +0000 UTC m=+161.890940692" watchObservedRunningTime="2026-01-23 08:22:56.332272252 +0000 UTC m=+161.905228620" Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.332524 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-hn45s" podStartSLOduration=139.332517318 podStartE2EDuration="2m19.332517318s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:56.262549494 +0000 UTC m=+161.835505862" watchObservedRunningTime="2026-01-23 08:22:56.332517318 +0000 UTC m=+161.905473686" Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.360020 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-nd7dc" podStartSLOduration=11.359995857 podStartE2EDuration="11.359995857s" podCreationTimestamp="2026-01-23 08:22:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:56.358563252 +0000 UTC m=+161.931519620" watchObservedRunningTime="2026-01-23 08:22:56.359995857 +0000 UTC m=+161.932952225" Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.398321 
4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:56 crc kubenswrapper[4711]: E0123 08:22:56.399063 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:56.899044827 +0000 UTC m=+162.472001205 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.454657 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mrmw6" podStartSLOduration=139.454634232 podStartE2EDuration="2m19.454634232s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:56.396829611 +0000 UTC m=+161.969785979" watchObservedRunningTime="2026-01-23 08:22:56.454634232 +0000 UTC m=+162.027590600" Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.462220 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-s6xwh" podStartSLOduration=139.462189011 podStartE2EDuration="2m19.462189011s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:56.45295589 +0000 UTC m=+162.025912258" watchObservedRunningTime="2026-01-23 08:22:56.462189011 +0000 UTC m=+162.035145389" Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.504092 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:56 crc kubenswrapper[4711]: E0123 08:22:56.504583 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:57.004557154 +0000 UTC m=+162.577513522 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.549177 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q" podStartSLOduration=140.549154702 podStartE2EDuration="2m20.549154702s" podCreationTimestamp="2026-01-23 08:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:56.504900443 +0000 UTC m=+162.077856811" watchObservedRunningTime="2026-01-23 08:22:56.549154702 +0000 UTC m=+162.122111070" Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.606693 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:56 crc kubenswrapper[4711]: E0123 08:22:56.607112 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:57.107098007 +0000 UTC m=+162.680054375 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.707765 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:56 crc kubenswrapper[4711]: E0123 08:22:56.708014 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:57.207959437 +0000 UTC m=+162.780915815 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.708431 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:56 crc kubenswrapper[4711]: E0123 08:22:56.708794 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:57.208777207 +0000 UTC m=+162.781733575 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.809963 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:56 crc kubenswrapper[4711]: E0123 08:22:56.810471 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:57.310416357 +0000 UTC m=+162.883372735 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.863452 4711 patch_prober.go:28] interesting pod/router-default-5444994796-v2m5p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 23 08:22:56 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld Jan 23 08:22:56 crc kubenswrapper[4711]: [+]process-running ok Jan 23 08:22:56 crc kubenswrapper[4711]: healthz check failed Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.863924 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-v2m5p" podUID="88908809-4dd9-4e62-9c5b-1bf8b3cfdaed" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 23 08:22:56 crc kubenswrapper[4711]: I0123 08:22:56.912148 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:56 crc kubenswrapper[4711]: E0123 08:22:56.912609 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:57.41259259 +0000 UTC m=+162.985548958 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.013351 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:57 crc kubenswrapper[4711]: E0123 08:22:57.013830 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:57.513805559 +0000 UTC m=+163.086761927 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.114789 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:57 crc kubenswrapper[4711]: E0123 08:22:57.115244 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:57.615228543 +0000 UTC m=+163.188184911 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.216016 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:57 crc kubenswrapper[4711]: E0123 08:22:57.216237 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:57.716190536 +0000 UTC m=+163.289146904 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.216340 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:57 crc kubenswrapper[4711]: E0123 08:22:57.216808 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:57.716792961 +0000 UTC m=+163.289749329 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.245825 4711 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-cw84n container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.20:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.246004 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n" podUID="238d099f-eb44-4b83-a996-647f7adad7d1" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.20:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.253747 4711 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-rhrsx container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.253820 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx" podUID="8b4e79a6-ff8b-4293-931b-bde9f25b7576" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.317235 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:57 crc kubenswrapper[4711]: E0123 08:22:57.317450 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:57.817409575 +0000 UTC m=+163.390365943 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.318120 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:57 crc kubenswrapper[4711]: E0123 08:22:57.320453 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:57.820438461 +0000 UTC m=+163.393394829 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.419485 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:57 crc kubenswrapper[4711]: E0123 08:22:57.419734 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:57.91968252 +0000 UTC m=+163.492638928 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.420149 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:57 crc kubenswrapper[4711]: E0123 08:22:57.420899 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:57.92087442 +0000 UTC m=+163.493830828 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.520857 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:57 crc kubenswrapper[4711]: E0123 08:22:57.521054 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:58.021015372 +0000 UTC m=+163.593971750 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.521124 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:57 crc kubenswrapper[4711]: E0123 08:22:57.521624 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:58.021608587 +0000 UTC m=+163.594565035 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.622374 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:57 crc kubenswrapper[4711]: E0123 08:22:57.622583 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:58.122540189 +0000 UTC m=+163.695496557 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.622780 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:57 crc kubenswrapper[4711]: E0123 08:22:57.623321 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:58.123297398 +0000 UTC m=+163.696253766 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.685575 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.690806 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-7nvn6" Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.724293 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:57 crc kubenswrapper[4711]: E0123 08:22:57.724605 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:58.224544758 +0000 UTC m=+163.797501126 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.724773 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:57 crc kubenswrapper[4711]: E0123 08:22:57.726793 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:58.226742503 +0000 UTC m=+163.799698881 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.826254 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:57 crc kubenswrapper[4711]: E0123 08:22:57.826742 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:58.326717311 +0000 UTC m=+163.899673679 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.865551 4711 patch_prober.go:28] interesting pod/router-default-5444994796-v2m5p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 23 08:22:57 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld Jan 23 08:22:57 crc kubenswrapper[4711]: [+]process-running ok Jan 23 08:22:57 crc kubenswrapper[4711]: healthz check failed Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.865641 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-v2m5p" podUID="88908809-4dd9-4e62-9c5b-1bf8b3cfdaed" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 23 08:22:57 crc kubenswrapper[4711]: I0123 08:22:57.934565 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:57 crc kubenswrapper[4711]: E0123 08:22:57.934975 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:58.434957466 +0000 UTC m=+164.007913834 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.035929 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:58 crc kubenswrapper[4711]: E0123 08:22:58.036381 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:58.53635804 +0000 UTC m=+164.109314408 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.088224 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-shg2k"] Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.089600 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-shg2k" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.094463 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.106043 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-shg2k"] Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.137160 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.137220 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8mls\" (UniqueName: \"kubernetes.io/projected/303af9ef-3014-4b33-ba8e-f6b4a9227485-kube-api-access-w8mls\") pod \"community-operators-shg2k\" (UID: \"303af9ef-3014-4b33-ba8e-f6b4a9227485\") " pod="openshift-marketplace/community-operators-shg2k" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.137295 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/303af9ef-3014-4b33-ba8e-f6b4a9227485-utilities\") pod \"community-operators-shg2k\" (UID: \"303af9ef-3014-4b33-ba8e-f6b4a9227485\") " pod="openshift-marketplace/community-operators-shg2k" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.137314 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/303af9ef-3014-4b33-ba8e-f6b4a9227485-catalog-content\") pod \"community-operators-shg2k\" (UID: \"303af9ef-3014-4b33-ba8e-f6b4a9227485\") " pod="openshift-marketplace/community-operators-shg2k" Jan 23 08:22:58 crc kubenswrapper[4711]: E0123 08:22:58.137636 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:58.637622531 +0000 UTC m=+164.210578899 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.156761 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw84n" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.209331 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.209389 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.211190 4711 patch_prober.go:28] interesting pod/console-f9d7485db-sk8zj container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.15:8443/health\": dial tcp 10.217.0.15:8443: connect: connection refused" start-of-body= Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.211270 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-sk8zj" podUID="a7e00bfd-844d-4264-aff6-d2bdb6673084" containerName="console" probeResult="failure" output="Get \"https://10.217.0.15:8443/health\": dial tcp 10.217.0.15:8443: connect: connection refused" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.244756 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:58 crc kubenswrapper[4711]: E0123 08:22:58.244996 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:58.744953862 +0000 UTC m=+164.317910230 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.245831 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/303af9ef-3014-4b33-ba8e-f6b4a9227485-utilities\") pod \"community-operators-shg2k\" (UID: \"303af9ef-3014-4b33-ba8e-f6b4a9227485\") " pod="openshift-marketplace/community-operators-shg2k" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.245888 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/303af9ef-3014-4b33-ba8e-f6b4a9227485-catalog-content\") pod \"community-operators-shg2k\" (UID: \"303af9ef-3014-4b33-ba8e-f6b4a9227485\") " pod="openshift-marketplace/community-operators-shg2k" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.246001 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.246048 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8mls\" (UniqueName: \"kubernetes.io/projected/303af9ef-3014-4b33-ba8e-f6b4a9227485-kube-api-access-w8mls\") pod \"community-operators-shg2k\" (UID: \"303af9ef-3014-4b33-ba8e-f6b4a9227485\") " pod="openshift-marketplace/community-operators-shg2k" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.247133 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/303af9ef-3014-4b33-ba8e-f6b4a9227485-utilities\") pod \"community-operators-shg2k\" (UID: \"303af9ef-3014-4b33-ba8e-f6b4a9227485\") " pod="openshift-marketplace/community-operators-shg2k" Jan 23 08:22:58 crc kubenswrapper[4711]: E0123 08:22:58.247358 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:58.747331002 +0000 UTC m=+164.320287370 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.247580 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/303af9ef-3014-4b33-ba8e-f6b4a9227485-catalog-content\") pod \"community-operators-shg2k\" (UID: \"303af9ef-3014-4b33-ba8e-f6b4a9227485\") " pod="openshift-marketplace/community-operators-shg2k" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.273352 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9qd4n"] Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.274634 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9qd4n" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.299463 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8mls\" (UniqueName: \"kubernetes.io/projected/303af9ef-3014-4b33-ba8e-f6b4a9227485-kube-api-access-w8mls\") pod \"community-operators-shg2k\" (UID: \"303af9ef-3014-4b33-ba8e-f6b4a9227485\") " pod="openshift-marketplace/community-operators-shg2k" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.306337 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-r9c8g" event={"ID":"ce996e23-feba-490d-9e1b-1ae772ed7886","Type":"ContainerStarted","Data":"f6d3ffb92f75dcc105444c3be97bf8d3aa3051dbbc2c0d453ea5fe4df42e91f2"} Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.308219 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.344176 4711 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.345278 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9qd4n"] Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.351550 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:58 crc kubenswrapper[4711]: E0123 08:22:58.351901 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:58.851864935 +0000 UTC m=+164.424821303 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.359972 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc70bbe1-6f64-4501-b4cd-afd381a50e86-utilities\") pod \"certified-operators-9qd4n\" (UID: \"dc70bbe1-6f64-4501-b4cd-afd381a50e86\") " pod="openshift-marketplace/certified-operators-9qd4n" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.360111 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5qdz\" (UniqueName: \"kubernetes.io/projected/dc70bbe1-6f64-4501-b4cd-afd381a50e86-kube-api-access-s5qdz\") pod \"certified-operators-9qd4n\" (UID: \"dc70bbe1-6f64-4501-b4cd-afd381a50e86\") " pod="openshift-marketplace/certified-operators-9qd4n" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.360569 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc70bbe1-6f64-4501-b4cd-afd381a50e86-catalog-content\") pod \"certified-operators-9qd4n\" (UID: \"dc70bbe1-6f64-4501-b4cd-afd381a50e86\") " pod="openshift-marketplace/certified-operators-9qd4n" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.360672 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:58 crc kubenswrapper[4711]: E0123 08:22:58.363784 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:58.863743322 +0000 UTC m=+164.436699720 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.421524 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-shg2k" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.452055 4711 patch_prober.go:28] interesting pod/downloads-7954f5f757-gxhxb container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.24:8080/\": dial tcp 10.217.0.24:8080: connect: connection refused" start-of-body= Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.452136 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-gxhxb" podUID="9b990ad9-6046-4ef0-bf53-1a5a74c9d0d8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.24:8080/\": dial tcp 10.217.0.24:8080: connect: connection refused" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.452837 4711 patch_prober.go:28] interesting pod/downloads-7954f5f757-gxhxb container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.24:8080/\": dial tcp 10.217.0.24:8080: connect: connection refused" start-of-body= Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.452859 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-gxhxb" podUID="9b990ad9-6046-4ef0-bf53-1a5a74c9d0d8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.24:8080/\": dial tcp 10.217.0.24:8080: connect: connection refused" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.461349 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-q5bgt"] Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.462409 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q5bgt" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.463499 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.463798 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc70bbe1-6f64-4501-b4cd-afd381a50e86-utilities\") pod \"certified-operators-9qd4n\" (UID: \"dc70bbe1-6f64-4501-b4cd-afd381a50e86\") " pod="openshift-marketplace/certified-operators-9qd4n" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.463846 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5qdz\" (UniqueName: \"kubernetes.io/projected/dc70bbe1-6f64-4501-b4cd-afd381a50e86-kube-api-access-s5qdz\") pod \"certified-operators-9qd4n\" (UID: \"dc70bbe1-6f64-4501-b4cd-afd381a50e86\") " pod="openshift-marketplace/certified-operators-9qd4n" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.463923 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc70bbe1-6f64-4501-b4cd-afd381a50e86-catalog-content\") pod \"certified-operators-9qd4n\" (UID: \"dc70bbe1-6f64-4501-b4cd-afd381a50e86\") " pod="openshift-marketplace/certified-operators-9qd4n" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.464407 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc70bbe1-6f64-4501-b4cd-afd381a50e86-catalog-content\") pod \"certified-operators-9qd4n\" (UID: \"dc70bbe1-6f64-4501-b4cd-afd381a50e86\") " pod="openshift-marketplace/certified-operators-9qd4n" Jan 23 08:22:58 crc kubenswrapper[4711]: E0123 08:22:58.464518 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:58.96448099 +0000 UTC m=+164.537437358 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.464739 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc70bbe1-6f64-4501-b4cd-afd381a50e86-utilities\") pod \"certified-operators-9qd4n\" (UID: \"dc70bbe1-6f64-4501-b4cd-afd381a50e86\") " pod="openshift-marketplace/certified-operators-9qd4n" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.521370 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5qdz\" (UniqueName: \"kubernetes.io/projected/dc70bbe1-6f64-4501-b4cd-afd381a50e86-kube-api-access-s5qdz\") pod \"certified-operators-9qd4n\" (UID: \"dc70bbe1-6f64-4501-b4cd-afd381a50e86\") " pod="openshift-marketplace/certified-operators-9qd4n" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.529430 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q5bgt"] Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.583861 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.584383 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xd88\" (UniqueName: \"kubernetes.io/projected/ac8209ab-0e14-4fed-9dcc-0978176748a1-kube-api-access-5xd88\") pod \"community-operators-q5bgt\" (UID: \"ac8209ab-0e14-4fed-9dcc-0978176748a1\") " pod="openshift-marketplace/community-operators-q5bgt" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.584530 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac8209ab-0e14-4fed-9dcc-0978176748a1-catalog-content\") pod \"community-operators-q5bgt\" (UID: \"ac8209ab-0e14-4fed-9dcc-0978176748a1\") " pod="openshift-marketplace/community-operators-q5bgt" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.584645 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/ac8209ab-0e14-4fed-9dcc-0978176748a1-utilities\") pod \"community-operators-q5bgt\" (UID: \"ac8209ab-0e14-4fed-9dcc-0978176748a1\") " pod="openshift-marketplace/community-operators-q5bgt" Jan 23 08:22:58 crc kubenswrapper[4711]: E0123 08:22:58.585200 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:59.085179207 +0000 UTC m=+164.658135585 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.647714 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9qd4n" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.688923 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.689217 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xd88\" (UniqueName: \"kubernetes.io/projected/ac8209ab-0e14-4fed-9dcc-0978176748a1-kube-api-access-5xd88\") pod \"community-operators-q5bgt\" (UID: \"ac8209ab-0e14-4fed-9dcc-0978176748a1\") " pod="openshift-marketplace/community-operators-q5bgt" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.689243 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac8209ab-0e14-4fed-9dcc-0978176748a1-catalog-content\") pod \"community-operators-q5bgt\" (UID: \"ac8209ab-0e14-4fed-9dcc-0978176748a1\") " pod="openshift-marketplace/community-operators-q5bgt" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.689264 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac8209ab-0e14-4fed-9dcc-0978176748a1-utilities\") pod \"community-operators-q5bgt\" (UID: \"ac8209ab-0e14-4fed-9dcc-0978176748a1\") " pod="openshift-marketplace/community-operators-q5bgt" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.689723 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac8209ab-0e14-4fed-9dcc-0978176748a1-utilities\") pod \"community-operators-q5bgt\" (UID: \"ac8209ab-0e14-4fed-9dcc-0978176748a1\") " pod="openshift-marketplace/community-operators-q5bgt" Jan 23 08:22:58 crc kubenswrapper[4711]: E0123 08:22:58.689798 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-23 08:22:59.189778831 +0000 UTC m=+164.762735199 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.690259 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac8209ab-0e14-4fed-9dcc-0978176748a1-catalog-content\") pod \"community-operators-q5bgt\" (UID: \"ac8209ab-0e14-4fed-9dcc-0978176748a1\") " pod="openshift-marketplace/community-operators-q5bgt" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.726975 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pvd47"] Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.728118 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pvd47" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.753991 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.754360 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pvd47"] Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.795164 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xd88\" (UniqueName: \"kubernetes.io/projected/ac8209ab-0e14-4fed-9dcc-0978176748a1-kube-api-access-5xd88\") pod \"community-operators-q5bgt\" (UID: \"ac8209ab-0e14-4fed-9dcc-0978176748a1\") " pod="openshift-marketplace/community-operators-q5bgt" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.797341 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.797389 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de36fa9c-96b8-44d2-8a0d-384ea7ad1806-utilities\") pod \"certified-operators-pvd47\" (UID: \"de36fa9c-96b8-44d2-8a0d-384ea7ad1806\") " pod="openshift-marketplace/certified-operators-pvd47" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.797423 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f94vj\" (UniqueName: \"kubernetes.io/projected/de36fa9c-96b8-44d2-8a0d-384ea7ad1806-kube-api-access-f94vj\") pod \"certified-operators-pvd47\" (UID: \"de36fa9c-96b8-44d2-8a0d-384ea7ad1806\") " pod="openshift-marketplace/certified-operators-pvd47" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.797472 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
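[Editor's sketch] The E-level entries above are kubelet's volume manager backing off while the kubevirt.io.hostpath-provisioner CSI driver is still unregistered: every failed TearDown/MountDevice arms a fixed 500ms gate ("No retries permitted until ... durationBeforeRetry 500ms"). A minimal Go sketch of such a fixed-delay retry gate; the names retryGate/Try are hypothetical, not kubelet's nestedpendingoperations internals:

package main

import (
	"fmt"
	"time"
)

type retryGate struct {
	notBefore time.Time     // earliest time the next attempt is allowed
	delay     time.Duration // fixed durationBeforeRetry (500ms in the log)
}

// Try runs op if the backoff window has elapsed; on failure it pushes
// the window forward, producing "no retries permitted until ..." behavior.
func (g *retryGate) Try(op func() error) error {
	if now := time.Now(); now.Before(g.notBefore) {
		return fmt.Errorf("no retries permitted until %s", g.notBefore)
	}
	if err := op(); err != nil {
		g.notBefore = time.Now().Add(g.delay)
		return err
	}
	return nil
}

func main() {
	g := &retryGate{delay: 500 * time.Millisecond}
	// First attempt fails (driver not registered yet); the next is gated.
	fmt.Println(g.Try(func() error { return fmt.Errorf("driver not found") }))
	fmt.Println(g.Try(func() error { return nil }))
}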
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de36fa9c-96b8-44d2-8a0d-384ea7ad1806-catalog-content\") pod \"certified-operators-pvd47\" (UID: \"de36fa9c-96b8-44d2-8a0d-384ea7ad1806\") " pod="openshift-marketplace/certified-operators-pvd47" Jan 23 08:22:58 crc kubenswrapper[4711]: E0123 08:22:58.798206 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:59.298186611 +0000 UTC m=+164.871142979 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.823940 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q5bgt" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.868111 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-v2m5p" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.876166 4711 patch_prober.go:28] interesting pod/router-default-5444994796-v2m5p container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 23 08:22:58 crc kubenswrapper[4711]: [-]has-synced failed: reason withheld Jan 23 08:22:58 crc kubenswrapper[4711]: [+]process-running ok Jan 23 08:22:58 crc kubenswrapper[4711]: healthz check failed Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.876236 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-v2m5p" podUID="88908809-4dd9-4e62-9c5b-1bf8b3cfdaed" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.899056 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.899302 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de36fa9c-96b8-44d2-8a0d-384ea7ad1806-utilities\") pod \"certified-operators-pvd47\" (UID: \"de36fa9c-96b8-44d2-8a0d-384ea7ad1806\") " pod="openshift-marketplace/certified-operators-pvd47" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.899346 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f94vj\" (UniqueName: \"kubernetes.io/projected/de36fa9c-96b8-44d2-8a0d-384ea7ad1806-kube-api-access-f94vj\") pod \"certified-operators-pvd47\" (UID: \"de36fa9c-96b8-44d2-8a0d-384ea7ad1806\") " pod="openshift-marketplace/certified-operators-pvd47" Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.899421 4711 
Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.899421 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de36fa9c-96b8-44d2-8a0d-384ea7ad1806-catalog-content\") pod \"certified-operators-pvd47\" (UID: \"de36fa9c-96b8-44d2-8a0d-384ea7ad1806\") " pod="openshift-marketplace/certified-operators-pvd47"
Jan 23 08:22:58 crc kubenswrapper[4711]: E0123 08:22:58.900271 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-23 08:22:59.400246691 +0000 UTC m=+164.973203059 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.900859 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de36fa9c-96b8-44d2-8a0d-384ea7ad1806-utilities\") pod \"certified-operators-pvd47\" (UID: \"de36fa9c-96b8-44d2-8a0d-384ea7ad1806\") " pod="openshift-marketplace/certified-operators-pvd47"
Jan 23 08:22:58 crc kubenswrapper[4711]: I0123 08:22:58.901606 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de36fa9c-96b8-44d2-8a0d-384ea7ad1806-catalog-content\") pod \"certified-operators-pvd47\" (UID: \"de36fa9c-96b8-44d2-8a0d-384ea7ad1806\") " pod="openshift-marketplace/certified-operators-pvd47"
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.003400 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f94vj\" (UniqueName: \"kubernetes.io/projected/de36fa9c-96b8-44d2-8a0d-384ea7ad1806-kube-api-access-f94vj\") pod \"certified-operators-pvd47\" (UID: \"de36fa9c-96b8-44d2-8a0d-384ea7ad1806\") " pod="openshift-marketplace/certified-operators-pvd47"
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.016647 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:59 crc kubenswrapper[4711]: E0123 08:22:59.018317 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-23 08:22:59.518283722 +0000 UTC m=+165.091240090 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-58wxx" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.029576 4711 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-23T08:22:58.344670635Z","Handler":null,"Name":""}
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.069054 4711 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.069110 4711 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.084102 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-shg2k"]
Jan 23 08:22:59 crc kubenswrapper[4711]: W0123 08:22:59.115358 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod303af9ef_3014_4b33_ba8e_f6b4a9227485.slice/crio-d80e561ae6963985ee3c71c36b0ac91dfd903475c5accd3c5e70d3816c5e8d81 WatchSource:0}: Error finding container d80e561ae6963985ee3c71c36b0ac91dfd903475c5accd3c5e70d3816c5e8d81: Status 404 returned error can't find the container with id d80e561ae6963985ee3c71c36b0ac91dfd903475c5accd3c5e70d3816c5e8d81
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.117451 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.126814 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pvd47"
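[Editor's sketch] The csi_plugin.go entries above show the registration that unblocks the stuck volume operations: the driver's registration socket appears under /var/lib/kubelet/plugins_registry/, and kubelet validates the driver by asking its Identity service for its name over the endpoint socket. A hedged, self-contained sketch of that validation step using the CSI spec's Go bindings (illustrative only, not kubelet's csi_plugin.go):

package main

import (
	"context"
	"fmt"
	"time"

	csi "github.com/container-storage-interface/spec/lib/go/csi"
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
)

func main() {
	// Endpoint taken from the log; gRPC over a local unix socket.
	conn, err := grpc.Dial("unix:///var/lib/kubelet/plugins/csi-hostpath/csi.sock",
		grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		panic(err)
	}
	defer conn.Close()

	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
	defer cancel()

	// GetPluginInfo returns the driver name that must match the registered
	// name, kubevirt.io.hostpath-provisioner in this log.
	info, err := csi.NewIdentityClient(conn).GetPluginInfo(ctx, &csi.GetPluginInfoRequest{})
	if err != nil {
		panic(err)
	}
	fmt.Println("driver name:", info.GetName())
}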
PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.320025 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.327627 4711 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.327669 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.338542 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shg2k" event={"ID":"303af9ef-3014-4b33-ba8e-f6b4a9227485","Type":"ContainerStarted","Data":"d80e561ae6963985ee3c71c36b0ac91dfd903475c5accd3c5e70d3816c5e8d81"} Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.348732 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-r9c8g" event={"ID":"ce996e23-feba-490d-9e1b-1ae772ed7886","Type":"ContainerStarted","Data":"e540bacc061a84c9919db06c1b43b7f14a60218c8e30188b4e40094766e7506b"} Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.348791 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-r9c8g" event={"ID":"ce996e23-feba-490d-9e1b-1ae772ed7886","Type":"ContainerStarted","Data":"62fe69083b9702ab9fd14a38b7acd1018ef4e2287ac9d9c8c0f9e719e3fcbd5b"} Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.494448 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.525800 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-58wxx\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.526555 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-r9c8g" podStartSLOduration=14.526542162 podStartE2EDuration="14.526542162s" podCreationTimestamp="2026-01-23 08:22:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:59.398281624 +0000 UTC m=+164.971237992" watchObservedRunningTime="2026-01-23 08:22:59.526542162 +0000 UTC 
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.526555 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-r9c8g" podStartSLOduration=14.526542162 podStartE2EDuration="14.526542162s" podCreationTimestamp="2026-01-23 08:22:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:22:59.398281624 +0000 UTC m=+164.971237992" watchObservedRunningTime="2026-01-23 08:22:59.526542162 +0000 UTC m=+165.099498530"
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.529845 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9qd4n"]
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.618049 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.622218 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.648878 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.651040 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.673110 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q5bgt"]
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.684770 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.731191 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/14fde908-f016-43a7-a521-70081a210846-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"14fde908-f016-43a7-a521-70081a210846\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.731283 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/14fde908-f016-43a7-a521-70081a210846-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"14fde908-f016-43a7-a521-70081a210846\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.731330 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs\") pod \"network-metrics-daemon-zv6rd\" (UID: \"f2bbf296-ae82-4cc3-b07d-bba10895a545\") " pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.741306 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2bbf296-ae82-4cc3-b07d-bba10895a545-metrics-certs\") pod \"network-metrics-daemon-zv6rd\" (UID: \"f2bbf296-ae82-4cc3-b07d-bba10895a545\") " pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.782216 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-58wxx"
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.836735 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/14fde908-f016-43a7-a521-70081a210846-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"14fde908-f016-43a7-a521-70081a210846\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.836823 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/14fde908-f016-43a7-a521-70081a210846-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"14fde908-f016-43a7-a521-70081a210846\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.836906 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/14fde908-f016-43a7-a521-70081a210846-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"14fde908-f016-43a7-a521-70081a210846\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.868615 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-v2m5p"
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.877400 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/14fde908-f016-43a7-a521-70081a210846-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"14fde908-f016-43a7-a521-70081a210846\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.881397 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-v2m5p"
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.891876 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zv6rd"
Jan 23 08:22:59 crc kubenswrapper[4711]: I0123 08:22:59.997809 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.019979 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pvd47"]
Jan 23 08:23:00 crc kubenswrapper[4711]: W0123 08:23:00.138120 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podde36fa9c_96b8_44d2_8a0d_384ea7ad1806.slice/crio-8d76494513662d7f86375fa5ca2c8ba7f7c46f4a90df94213c5ed60252eded5e WatchSource:0}: Error finding container 8d76494513662d7f86375fa5ca2c8ba7f7c46f4a90df94213c5ed60252eded5e: Status 404 returned error can't find the container with id 8d76494513662d7f86375fa5ca2c8ba7f7c46f4a90df94213c5ed60252eded5e
Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.258937 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-58wxx"]
Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.270286 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7xzhz"]
Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.272817 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7xzhz"
Jan 23 08:23:00 crc kubenswrapper[4711]: W0123 08:23:00.273240 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbb9eeabb_23bb_45db_bcbb_aae7c165f260.slice/crio-6d0ece91f10eab8a0f149d06e7f0e4de29b19f78af29354e7b9a8e207c6780d7 WatchSource:0}: Error finding container 6d0ece91f10eab8a0f149d06e7f0e4de29b19f78af29354e7b9a8e207c6780d7: Status 404 returned error can't find the container with id 6d0ece91f10eab8a0f149d06e7f0e4de29b19f78af29354e7b9a8e207c6780d7
Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.279062 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.294418 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7xzhz"]
Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.347984 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f17fdf7-38ee-4c60-a0e9-e293cdd77830-catalog-content\") pod \"redhat-marketplace-7xzhz\" (UID: \"8f17fdf7-38ee-4c60-a0e9-e293cdd77830\") " pod="openshift-marketplace/redhat-marketplace-7xzhz"
Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.348063 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f17fdf7-38ee-4c60-a0e9-e293cdd77830-utilities\") pod \"redhat-marketplace-7xzhz\" (UID: \"8f17fdf7-38ee-4c60-a0e9-e293cdd77830\") " pod="openshift-marketplace/redhat-marketplace-7xzhz"
Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.348112 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9w8v8\" (UniqueName: \"kubernetes.io/projected/8f17fdf7-38ee-4c60-a0e9-e293cdd77830-kube-api-access-9w8v8\") pod \"redhat-marketplace-7xzhz\" (UID: \"8f17fdf7-38ee-4c60-a0e9-e293cdd77830\") " pod="openshift-marketplace/redhat-marketplace-7xzhz"
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pvd47" event={"ID":"de36fa9c-96b8-44d2-8a0d-384ea7ad1806","Type":"ContainerStarted","Data":"8d76494513662d7f86375fa5ca2c8ba7f7c46f4a90df94213c5ed60252eded5e"} Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.360952 4711 generic.go:334] "Generic (PLEG): container finished" podID="303af9ef-3014-4b33-ba8e-f6b4a9227485" containerID="a07f2a6a656138a829c8c6c9a6eecc2ae3152f76152e02220387a706c4ce4b43" exitCode=0 Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.361007 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shg2k" event={"ID":"303af9ef-3014-4b33-ba8e-f6b4a9227485","Type":"ContainerDied","Data":"a07f2a6a656138a829c8c6c9a6eecc2ae3152f76152e02220387a706c4ce4b43"} Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.363050 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.365364 4711 generic.go:334] "Generic (PLEG): container finished" podID="ac8209ab-0e14-4fed-9dcc-0978176748a1" containerID="869d10e7d3b796c75b6cef6f84dced38da1c4a28ab3f7e158a18ca6d17d4b42c" exitCode=0 Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.365420 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q5bgt" event={"ID":"ac8209ab-0e14-4fed-9dcc-0978176748a1","Type":"ContainerDied","Data":"869d10e7d3b796c75b6cef6f84dced38da1c4a28ab3f7e158a18ca6d17d4b42c"} Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.365443 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q5bgt" event={"ID":"ac8209ab-0e14-4fed-9dcc-0978176748a1","Type":"ContainerStarted","Data":"3d220ab63a656417a68dc9ba2c2057e53fb3bdd2f9db8350e127fafcf5ce30a7"} Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.368949 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" event={"ID":"bb9eeabb-23bb-45db-bcbb-aae7c165f260","Type":"ContainerStarted","Data":"6d0ece91f10eab8a0f149d06e7f0e4de29b19f78af29354e7b9a8e207c6780d7"} Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.376365 4711 generic.go:334] "Generic (PLEG): container finished" podID="e1ab258d-ba2f-434d-8a15-2a08f24d03cb" containerID="806b801deab8860adaf602e8a412d238e40516c62c604d434f2ad5d861286a96" exitCode=0 Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.376463 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh" event={"ID":"e1ab258d-ba2f-434d-8a15-2a08f24d03cb","Type":"ContainerDied","Data":"806b801deab8860adaf602e8a412d238e40516c62c604d434f2ad5d861286a96"} Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.385007 4711 generic.go:334] "Generic (PLEG): container finished" podID="dc70bbe1-6f64-4501-b4cd-afd381a50e86" containerID="c14b890af9ee8b597f6605dbdd6e2cd3337f8cbf9abb549611b3acc882a8d219" exitCode=0 Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.385234 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9qd4n" event={"ID":"dc70bbe1-6f64-4501-b4cd-afd381a50e86","Type":"ContainerDied","Data":"c14b890af9ee8b597f6605dbdd6e2cd3337f8cbf9abb549611b3acc882a8d219"} Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.385302 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-9qd4n" event={"ID":"dc70bbe1-6f64-4501-b4cd-afd381a50e86","Type":"ContainerStarted","Data":"cc7b97520541c492eb3ef7a4fd5143c6533366a95d881127774eca463084a523"} Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.449718 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9w8v8\" (UniqueName: \"kubernetes.io/projected/8f17fdf7-38ee-4c60-a0e9-e293cdd77830-kube-api-access-9w8v8\") pod \"redhat-marketplace-7xzhz\" (UID: \"8f17fdf7-38ee-4c60-a0e9-e293cdd77830\") " pod="openshift-marketplace/redhat-marketplace-7xzhz" Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.449825 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f17fdf7-38ee-4c60-a0e9-e293cdd77830-catalog-content\") pod \"redhat-marketplace-7xzhz\" (UID: \"8f17fdf7-38ee-4c60-a0e9-e293cdd77830\") " pod="openshift-marketplace/redhat-marketplace-7xzhz" Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.449901 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f17fdf7-38ee-4c60-a0e9-e293cdd77830-utilities\") pod \"redhat-marketplace-7xzhz\" (UID: \"8f17fdf7-38ee-4c60-a0e9-e293cdd77830\") " pod="openshift-marketplace/redhat-marketplace-7xzhz" Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.452083 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f17fdf7-38ee-4c60-a0e9-e293cdd77830-utilities\") pod \"redhat-marketplace-7xzhz\" (UID: \"8f17fdf7-38ee-4c60-a0e9-e293cdd77830\") " pod="openshift-marketplace/redhat-marketplace-7xzhz" Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.452901 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f17fdf7-38ee-4c60-a0e9-e293cdd77830-catalog-content\") pod \"redhat-marketplace-7xzhz\" (UID: \"8f17fdf7-38ee-4c60-a0e9-e293cdd77830\") " pod="openshift-marketplace/redhat-marketplace-7xzhz" Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.484737 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9w8v8\" (UniqueName: \"kubernetes.io/projected/8f17fdf7-38ee-4c60-a0e9-e293cdd77830-kube-api-access-9w8v8\") pod \"redhat-marketplace-7xzhz\" (UID: \"8f17fdf7-38ee-4c60-a0e9-e293cdd77830\") " pod="openshift-marketplace/redhat-marketplace-7xzhz" Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.545194 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q" Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.634107 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7xzhz" Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.663124 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qq59p"] Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.664258 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qq59p" Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.702189 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-zv6rd"] Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.738402 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 23 08:23:00 crc kubenswrapper[4711]: W0123 08:23:00.743022 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod14fde908_f016_43a7_a521_70081a210846.slice/crio-495b4eee2d8eb290ee65e90134edeacce1ab8ce0bd3d00ac192db1689f4fefd3 WatchSource:0}: Error finding container 495b4eee2d8eb290ee65e90134edeacce1ab8ce0bd3d00ac192db1689f4fefd3: Status 404 returned error can't find the container with id 495b4eee2d8eb290ee65e90134edeacce1ab8ce0bd3d00ac192db1689f4fefd3 Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.744867 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qq59p"] Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.777811 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jkvl\" (UniqueName: \"kubernetes.io/projected/0a16e14b-953f-491c-9986-b5bafcf8cd0b-kube-api-access-9jkvl\") pod \"redhat-marketplace-qq59p\" (UID: \"0a16e14b-953f-491c-9986-b5bafcf8cd0b\") " pod="openshift-marketplace/redhat-marketplace-qq59p" Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.778011 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a16e14b-953f-491c-9986-b5bafcf8cd0b-catalog-content\") pod \"redhat-marketplace-qq59p\" (UID: \"0a16e14b-953f-491c-9986-b5bafcf8cd0b\") " pod="openshift-marketplace/redhat-marketplace-qq59p" Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.778420 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a16e14b-953f-491c-9986-b5bafcf8cd0b-utilities\") pod \"redhat-marketplace-qq59p\" (UID: \"0a16e14b-953f-491c-9986-b5bafcf8cd0b\") " pod="openshift-marketplace/redhat-marketplace-qq59p" Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.886716 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a16e14b-953f-491c-9986-b5bafcf8cd0b-utilities\") pod \"redhat-marketplace-qq59p\" (UID: \"0a16e14b-953f-491c-9986-b5bafcf8cd0b\") " pod="openshift-marketplace/redhat-marketplace-qq59p" Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.886823 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jkvl\" (UniqueName: \"kubernetes.io/projected/0a16e14b-953f-491c-9986-b5bafcf8cd0b-kube-api-access-9jkvl\") pod \"redhat-marketplace-qq59p\" (UID: \"0a16e14b-953f-491c-9986-b5bafcf8cd0b\") " pod="openshift-marketplace/redhat-marketplace-qq59p" Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.886866 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a16e14b-953f-491c-9986-b5bafcf8cd0b-catalog-content\") pod \"redhat-marketplace-qq59p\" (UID: \"0a16e14b-953f-491c-9986-b5bafcf8cd0b\") " pod="openshift-marketplace/redhat-marketplace-qq59p" Jan 23 08:23:00 crc 
kubenswrapper[4711]: I0123 08:23:00.887885 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a16e14b-953f-491c-9986-b5bafcf8cd0b-catalog-content\") pod \"redhat-marketplace-qq59p\" (UID: \"0a16e14b-953f-491c-9986-b5bafcf8cd0b\") " pod="openshift-marketplace/redhat-marketplace-qq59p" Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.888266 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a16e14b-953f-491c-9986-b5bafcf8cd0b-utilities\") pod \"redhat-marketplace-qq59p\" (UID: \"0a16e14b-953f-491c-9986-b5bafcf8cd0b\") " pod="openshift-marketplace/redhat-marketplace-qq59p" Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.943650 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jkvl\" (UniqueName: \"kubernetes.io/projected/0a16e14b-953f-491c-9986-b5bafcf8cd0b-kube-api-access-9jkvl\") pod \"redhat-marketplace-qq59p\" (UID: \"0a16e14b-953f-491c-9986-b5bafcf8cd0b\") " pod="openshift-marketplace/redhat-marketplace-qq59p" Jan 23 08:23:00 crc kubenswrapper[4711]: I0123 08:23:00.983147 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qq59p" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.018223 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7xzhz"] Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.123807 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.124950 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.127885 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.130130 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.130786 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.196212 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/674c5f67-a17e-4462-bc2d-cca82e7f2e56-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"674c5f67-a17e-4462-bc2d-cca82e7f2e56\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.201450 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/674c5f67-a17e-4462-bc2d-cca82e7f2e56-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"674c5f67-a17e-4462-bc2d-cca82e7f2e56\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.303321 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/674c5f67-a17e-4462-bc2d-cca82e7f2e56-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"674c5f67-a17e-4462-bc2d-cca82e7f2e56\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.303399 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/674c5f67-a17e-4462-bc2d-cca82e7f2e56-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"674c5f67-a17e-4462-bc2d-cca82e7f2e56\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.303428 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/674c5f67-a17e-4462-bc2d-cca82e7f2e56-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"674c5f67-a17e-4462-bc2d-cca82e7f2e56\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.322065 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/674c5f67-a17e-4462-bc2d-cca82e7f2e56-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"674c5f67-a17e-4462-bc2d-cca82e7f2e56\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.408172 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7xzhz" event={"ID":"8f17fdf7-38ee-4c60-a0e9-e293cdd77830","Type":"ContainerStarted","Data":"f7b57397b7b3f07d8b20725952f7ecc2c8bd247f6c8ee9a775da9ac9c6838ae8"} Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.410059 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" 
event={"ID":"bb9eeabb-23bb-45db-bcbb-aae7c165f260","Type":"ContainerStarted","Data":"19992feb250e2ad43b750f498864bcf6e5986e228b53b78613c6cf1ec344f5b1"} Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.410688 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.412277 4711 generic.go:334] "Generic (PLEG): container finished" podID="de36fa9c-96b8-44d2-8a0d-384ea7ad1806" containerID="c12b1e33c25985533db1ac3a3ccd0bf897f86d5e5ffc0a2d3d9e45fcbbe4e442" exitCode=0 Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.412331 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pvd47" event={"ID":"de36fa9c-96b8-44d2-8a0d-384ea7ad1806","Type":"ContainerDied","Data":"c12b1e33c25985533db1ac3a3ccd0bf897f86d5e5ffc0a2d3d9e45fcbbe4e442"} Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.422252 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"14fde908-f016-43a7-a521-70081a210846","Type":"ContainerStarted","Data":"495b4eee2d8eb290ee65e90134edeacce1ab8ce0bd3d00ac192db1689f4fefd3"} Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.425795 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" event={"ID":"f2bbf296-ae82-4cc3-b07d-bba10895a545","Type":"ContainerStarted","Data":"d9b7f6874a182e266f4b2c96ca3492ae77bf6eddf562cfe5d7467b069d30d327"} Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.434985 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" podStartSLOduration=144.434962256 podStartE2EDuration="2m24.434962256s" podCreationTimestamp="2026-01-23 08:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:23:01.430593586 +0000 UTC m=+167.003549954" watchObservedRunningTime="2026-01-23 08:23:01.434962256 +0000 UTC m=+167.007918614" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.467417 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-885xw"] Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.469047 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-885xw" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.472315 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.495553 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-885xw"] Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.495599 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qq59p"] Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.563343 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.608909 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2c8fv\" (UniqueName: \"kubernetes.io/projected/9ef23d02-cc65-4020-897b-3e114c07d801-kube-api-access-2c8fv\") pod \"redhat-operators-885xw\" (UID: \"9ef23d02-cc65-4020-897b-3e114c07d801\") " pod="openshift-marketplace/redhat-operators-885xw" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.608971 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ef23d02-cc65-4020-897b-3e114c07d801-utilities\") pod \"redhat-operators-885xw\" (UID: \"9ef23d02-cc65-4020-897b-3e114c07d801\") " pod="openshift-marketplace/redhat-operators-885xw" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.609030 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ef23d02-cc65-4020-897b-3e114c07d801-catalog-content\") pod \"redhat-operators-885xw\" (UID: \"9ef23d02-cc65-4020-897b-3e114c07d801\") " pod="openshift-marketplace/redhat-operators-885xw" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.710266 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ef23d02-cc65-4020-897b-3e114c07d801-catalog-content\") pod \"redhat-operators-885xw\" (UID: \"9ef23d02-cc65-4020-897b-3e114c07d801\") " pod="openshift-marketplace/redhat-operators-885xw" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.710683 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2c8fv\" (UniqueName: \"kubernetes.io/projected/9ef23d02-cc65-4020-897b-3e114c07d801-kube-api-access-2c8fv\") pod \"redhat-operators-885xw\" (UID: \"9ef23d02-cc65-4020-897b-3e114c07d801\") " pod="openshift-marketplace/redhat-operators-885xw" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.710721 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ef23d02-cc65-4020-897b-3e114c07d801-utilities\") pod \"redhat-operators-885xw\" (UID: \"9ef23d02-cc65-4020-897b-3e114c07d801\") " pod="openshift-marketplace/redhat-operators-885xw" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.710914 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ef23d02-cc65-4020-897b-3e114c07d801-catalog-content\") pod \"redhat-operators-885xw\" (UID: \"9ef23d02-cc65-4020-897b-3e114c07d801\") " pod="openshift-marketplace/redhat-operators-885xw" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.711115 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ef23d02-cc65-4020-897b-3e114c07d801-utilities\") pod \"redhat-operators-885xw\" (UID: \"9ef23d02-cc65-4020-897b-3e114c07d801\") " pod="openshift-marketplace/redhat-operators-885xw" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.731006 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2c8fv\" (UniqueName: \"kubernetes.io/projected/9ef23d02-cc65-4020-897b-3e114c07d801-kube-api-access-2c8fv\") pod \"redhat-operators-885xw\" (UID: 
\"9ef23d02-cc65-4020-897b-3e114c07d801\") " pod="openshift-marketplace/redhat-operators-885xw" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.802209 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-885xw" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.852024 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kqptv"] Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.853188 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kqptv" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.866763 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kqptv"] Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.913126 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c8ded03-464c-4a85-8468-067607680129-utilities\") pod \"redhat-operators-kqptv\" (UID: \"3c8ded03-464c-4a85-8468-067607680129\") " pod="openshift-marketplace/redhat-operators-kqptv" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.913307 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thkcp\" (UniqueName: \"kubernetes.io/projected/3c8ded03-464c-4a85-8468-067607680129-kube-api-access-thkcp\") pod \"redhat-operators-kqptv\" (UID: \"3c8ded03-464c-4a85-8468-067607680129\") " pod="openshift-marketplace/redhat-operators-kqptv" Jan 23 08:23:01 crc kubenswrapper[4711]: I0123 08:23:01.913344 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c8ded03-464c-4a85-8468-067607680129-catalog-content\") pod \"redhat-operators-kqptv\" (UID: \"3c8ded03-464c-4a85-8468-067607680129\") " pod="openshift-marketplace/redhat-operators-kqptv" Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.014783 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thkcp\" (UniqueName: \"kubernetes.io/projected/3c8ded03-464c-4a85-8468-067607680129-kube-api-access-thkcp\") pod \"redhat-operators-kqptv\" (UID: \"3c8ded03-464c-4a85-8468-067607680129\") " pod="openshift-marketplace/redhat-operators-kqptv" Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.014875 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c8ded03-464c-4a85-8468-067607680129-catalog-content\") pod \"redhat-operators-kqptv\" (UID: \"3c8ded03-464c-4a85-8468-067607680129\") " pod="openshift-marketplace/redhat-operators-kqptv" Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.014936 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c8ded03-464c-4a85-8468-067607680129-utilities\") pod \"redhat-operators-kqptv\" (UID: \"3c8ded03-464c-4a85-8468-067607680129\") " pod="openshift-marketplace/redhat-operators-kqptv" Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.015550 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c8ded03-464c-4a85-8468-067607680129-utilities\") pod \"redhat-operators-kqptv\" (UID: \"3c8ded03-464c-4a85-8468-067607680129\") " 
pod="openshift-marketplace/redhat-operators-kqptv" Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.015643 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c8ded03-464c-4a85-8468-067607680129-catalog-content\") pod \"redhat-operators-kqptv\" (UID: \"3c8ded03-464c-4a85-8468-067607680129\") " pod="openshift-marketplace/redhat-operators-kqptv" Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.040879 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thkcp\" (UniqueName: \"kubernetes.io/projected/3c8ded03-464c-4a85-8468-067607680129-kube-api-access-thkcp\") pod \"redhat-operators-kqptv\" (UID: \"3c8ded03-464c-4a85-8468-067607680129\") " pod="openshift-marketplace/redhat-operators-kqptv" Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.167999 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kqptv" Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.329705 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh" Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.419165 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e1ab258d-ba2f-434d-8a15-2a08f24d03cb-config-volume\") pod \"e1ab258d-ba2f-434d-8a15-2a08f24d03cb\" (UID: \"e1ab258d-ba2f-434d-8a15-2a08f24d03cb\") " Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.419455 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwk4g\" (UniqueName: \"kubernetes.io/projected/e1ab258d-ba2f-434d-8a15-2a08f24d03cb-kube-api-access-bwk4g\") pod \"e1ab258d-ba2f-434d-8a15-2a08f24d03cb\" (UID: \"e1ab258d-ba2f-434d-8a15-2a08f24d03cb\") " Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.419802 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e1ab258d-ba2f-434d-8a15-2a08f24d03cb-secret-volume\") pod \"e1ab258d-ba2f-434d-8a15-2a08f24d03cb\" (UID: \"e1ab258d-ba2f-434d-8a15-2a08f24d03cb\") " Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.422613 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1ab258d-ba2f-434d-8a15-2a08f24d03cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "e1ab258d-ba2f-434d-8a15-2a08f24d03cb" (UID: "e1ab258d-ba2f-434d-8a15-2a08f24d03cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.423845 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1ab258d-ba2f-434d-8a15-2a08f24d03cb-kube-api-access-bwk4g" (OuterVolumeSpecName: "kube-api-access-bwk4g") pod "e1ab258d-ba2f-434d-8a15-2a08f24d03cb" (UID: "e1ab258d-ba2f-434d-8a15-2a08f24d03cb"). InnerVolumeSpecName "kube-api-access-bwk4g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.429287 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1ab258d-ba2f-434d-8a15-2a08f24d03cb-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e1ab258d-ba2f-434d-8a15-2a08f24d03cb" (UID: "e1ab258d-ba2f-434d-8a15-2a08f24d03cb"). 
InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.446334 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7xzhz" event={"ID":"8f17fdf7-38ee-4c60-a0e9-e293cdd77830","Type":"ContainerStarted","Data":"1f4f0199628880e379c4fd6615da6ba8f9a4f4d398cda362e89af71a80f40522"} Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.448565 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh" event={"ID":"e1ab258d-ba2f-434d-8a15-2a08f24d03cb","Type":"ContainerDied","Data":"d0260664dd01a6585928594c698c8cd2d5ded01f9b9fe8436eaa3105184dc63d"} Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.448626 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d0260664dd01a6585928594c698c8cd2d5ded01f9b9fe8436eaa3105184dc63d" Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.448697 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh" Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.450636 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qq59p" event={"ID":"0a16e14b-953f-491c-9986-b5bafcf8cd0b","Type":"ContainerStarted","Data":"7dfb5fe2248f72147abc0bfc212fa50177980e11e221e829de5c39e0d2e68522"} Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.459202 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" event={"ID":"f2bbf296-ae82-4cc3-b07d-bba10895a545","Type":"ContainerStarted","Data":"10eec0148faf77c26cdac20c21c696f53f64f3fec7ef213c229def9ce65d5ce7"} Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.524414 4711 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e1ab258d-ba2f-434d-8a15-2a08f24d03cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.524460 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwk4g\" (UniqueName: \"kubernetes.io/projected/e1ab258d-ba2f-434d-8a15-2a08f24d03cb-kube-api-access-bwk4g\") on node \"crc\" DevicePath \"\"" Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.524475 4711 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e1ab258d-ba2f-434d-8a15-2a08f24d03cb-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.811229 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.818738 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kqptv"] Jan 23 08:23:02 crc kubenswrapper[4711]: I0123 08:23:02.863814 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-885xw"] Jan 23 08:23:03 crc kubenswrapper[4711]: I0123 08:23:03.485771 4711 generic.go:334] "Generic (PLEG): container finished" podID="0a16e14b-953f-491c-9986-b5bafcf8cd0b" containerID="8ce99308f919652241bd476f7f200338c78e4484fcfd5c80ee01130db94a9245" exitCode=0 Jan 23 08:23:03 crc kubenswrapper[4711]: I0123 08:23:03.486685 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"674c5f67-a17e-4462-bc2d-cca82e7f2e56","Type":"ContainerStarted","Data":"142b7bff41e1d9fea96a8b59417f14a80f9c395d7d9f43673776285b97e4beac"} Jan 23 08:23:03 crc kubenswrapper[4711]: I0123 08:23:03.486731 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"674c5f67-a17e-4462-bc2d-cca82e7f2e56","Type":"ContainerStarted","Data":"369ddcfa9194603c5c1826c259727c8ce317d14ba8575e06aab29f3d9ca31e9b"} Jan 23 08:23:03 crc kubenswrapper[4711]: I0123 08:23:03.486741 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qq59p" event={"ID":"0a16e14b-953f-491c-9986-b5bafcf8cd0b","Type":"ContainerDied","Data":"8ce99308f919652241bd476f7f200338c78e4484fcfd5c80ee01130db94a9245"} Jan 23 08:23:03 crc kubenswrapper[4711]: I0123 08:23:03.488497 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"14fde908-f016-43a7-a521-70081a210846","Type":"ContainerStarted","Data":"37dabcc7d2ddd7ffb65421672f55bcd0d7830cea8000ee45c3c6b77cf6e576d0"} Jan 23 08:23:03 crc kubenswrapper[4711]: I0123 08:23:03.493165 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-zv6rd" event={"ID":"f2bbf296-ae82-4cc3-b07d-bba10895a545","Type":"ContainerStarted","Data":"3f37352d4b3a38d83cf2057d8dc89b63d3b41a2de3f90899d1b3ac6c9ace3e79"} Jan 23 08:23:03 crc kubenswrapper[4711]: I0123 08:23:03.493497 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.493462513 podStartE2EDuration="2.493462513s" podCreationTimestamp="2026-01-23 08:23:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:23:03.491243727 +0000 UTC m=+169.064200095" watchObservedRunningTime="2026-01-23 08:23:03.493462513 +0000 UTC m=+169.066418881" Jan 23 08:23:03 crc kubenswrapper[4711]: I0123 08:23:03.497273 4711 generic.go:334] "Generic (PLEG): container finished" podID="8f17fdf7-38ee-4c60-a0e9-e293cdd77830" containerID="1f4f0199628880e379c4fd6615da6ba8f9a4f4d398cda362e89af71a80f40522" exitCode=0 Jan 23 08:23:03 crc kubenswrapper[4711]: I0123 08:23:03.497578 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7xzhz" event={"ID":"8f17fdf7-38ee-4c60-a0e9-e293cdd77830","Type":"ContainerDied","Data":"1f4f0199628880e379c4fd6615da6ba8f9a4f4d398cda362e89af71a80f40522"} Jan 23 08:23:03 crc kubenswrapper[4711]: I0123 08:23:03.502972 4711 generic.go:334] "Generic (PLEG): container finished" podID="3c8ded03-464c-4a85-8468-067607680129" containerID="4131b1d0cce1d3e24ba46beba0b306cb7fb435d51f024c62fe66cd2f24da5efe" exitCode=0 Jan 23 08:23:03 crc kubenswrapper[4711]: I0123 08:23:03.503048 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kqptv" event={"ID":"3c8ded03-464c-4a85-8468-067607680129","Type":"ContainerDied","Data":"4131b1d0cce1d3e24ba46beba0b306cb7fb435d51f024c62fe66cd2f24da5efe"} Jan 23 08:23:03 crc kubenswrapper[4711]: I0123 08:23:03.503084 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kqptv" event={"ID":"3c8ded03-464c-4a85-8468-067607680129","Type":"ContainerStarted","Data":"0d48bf623af35170c54f120f1e9916f9a5d75bdf427f2c5defb7a02767b52c76"} Jan 
23 08:23:03 crc kubenswrapper[4711]: I0123 08:23:03.523127 4711 generic.go:334] "Generic (PLEG): container finished" podID="9ef23d02-cc65-4020-897b-3e114c07d801" containerID="032013d65a98f52d05eef1cb29427306437c6a4885549ffb763ec167e9be582d" exitCode=0 Jan 23 08:23:03 crc kubenswrapper[4711]: I0123 08:23:03.523184 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-885xw" event={"ID":"9ef23d02-cc65-4020-897b-3e114c07d801","Type":"ContainerDied","Data":"032013d65a98f52d05eef1cb29427306437c6a4885549ffb763ec167e9be582d"} Jan 23 08:23:03 crc kubenswrapper[4711]: I0123 08:23:03.523215 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-885xw" event={"ID":"9ef23d02-cc65-4020-897b-3e114c07d801","Type":"ContainerStarted","Data":"02009f5c2e13aa2f4851fe5ff42e8966ed046a60f0c36498af5f108f8f60720d"} Jan 23 08:23:03 crc kubenswrapper[4711]: I0123 08:23:03.678471 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-zv6rd" podStartSLOduration=147.678433413 podStartE2EDuration="2m27.678433413s" podCreationTimestamp="2026-01-23 08:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:23:03.677958082 +0000 UTC m=+169.250914450" watchObservedRunningTime="2026-01-23 08:23:03.678433413 +0000 UTC m=+169.251389781" Jan 23 08:23:04 crc kubenswrapper[4711]: I0123 08:23:04.120191 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-nd7dc" Jan 23 08:23:04 crc kubenswrapper[4711]: I0123 08:23:04.561952 4711 generic.go:334] "Generic (PLEG): container finished" podID="674c5f67-a17e-4462-bc2d-cca82e7f2e56" containerID="142b7bff41e1d9fea96a8b59417f14a80f9c395d7d9f43673776285b97e4beac" exitCode=0 Jan 23 08:23:04 crc kubenswrapper[4711]: I0123 08:23:04.562108 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"674c5f67-a17e-4462-bc2d-cca82e7f2e56","Type":"ContainerDied","Data":"142b7bff41e1d9fea96a8b59417f14a80f9c395d7d9f43673776285b97e4beac"} Jan 23 08:23:04 crc kubenswrapper[4711]: I0123 08:23:04.563859 4711 generic.go:334] "Generic (PLEG): container finished" podID="14fde908-f016-43a7-a521-70081a210846" containerID="37dabcc7d2ddd7ffb65421672f55bcd0d7830cea8000ee45c3c6b77cf6e576d0" exitCode=0 Jan 23 08:23:04 crc kubenswrapper[4711]: I0123 08:23:04.565534 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"14fde908-f016-43a7-a521-70081a210846","Type":"ContainerDied","Data":"37dabcc7d2ddd7ffb65421672f55bcd0d7830cea8000ee45c3c6b77cf6e576d0"} Jan 23 08:23:05 crc kubenswrapper[4711]: I0123 08:23:05.016458 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 23 08:23:05 crc kubenswrapper[4711]: I0123 08:23:05.086721 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/14fde908-f016-43a7-a521-70081a210846-kubelet-dir\") pod \"14fde908-f016-43a7-a521-70081a210846\" (UID: \"14fde908-f016-43a7-a521-70081a210846\") " Jan 23 08:23:05 crc kubenswrapper[4711]: I0123 08:23:05.086823 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/14fde908-f016-43a7-a521-70081a210846-kube-api-access\") pod \"14fde908-f016-43a7-a521-70081a210846\" (UID: \"14fde908-f016-43a7-a521-70081a210846\") " Jan 23 08:23:05 crc kubenswrapper[4711]: I0123 08:23:05.087882 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/14fde908-f016-43a7-a521-70081a210846-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "14fde908-f016-43a7-a521-70081a210846" (UID: "14fde908-f016-43a7-a521-70081a210846"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:23:05 crc kubenswrapper[4711]: I0123 08:23:05.105425 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14fde908-f016-43a7-a521-70081a210846-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "14fde908-f016-43a7-a521-70081a210846" (UID: "14fde908-f016-43a7-a521-70081a210846"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:23:05 crc kubenswrapper[4711]: I0123 08:23:05.188384 4711 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/14fde908-f016-43a7-a521-70081a210846-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 23 08:23:05 crc kubenswrapper[4711]: I0123 08:23:05.188417 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/14fde908-f016-43a7-a521-70081a210846-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 23 08:23:05 crc kubenswrapper[4711]: I0123 08:23:05.586741 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"14fde908-f016-43a7-a521-70081a210846","Type":"ContainerDied","Data":"495b4eee2d8eb290ee65e90134edeacce1ab8ce0bd3d00ac192db1689f4fefd3"} Jan 23 08:23:05 crc kubenswrapper[4711]: I0123 08:23:05.586834 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="495b4eee2d8eb290ee65e90134edeacce1ab8ce0bd3d00ac192db1689f4fefd3" Jan 23 08:23:05 crc kubenswrapper[4711]: I0123 08:23:05.586782 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 23 08:23:06 crc kubenswrapper[4711]: I0123 08:23:06.122699 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 23 08:23:06 crc kubenswrapper[4711]: I0123 08:23:06.216526 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/674c5f67-a17e-4462-bc2d-cca82e7f2e56-kubelet-dir\") pod \"674c5f67-a17e-4462-bc2d-cca82e7f2e56\" (UID: \"674c5f67-a17e-4462-bc2d-cca82e7f2e56\") " Jan 23 08:23:06 crc kubenswrapper[4711]: I0123 08:23:06.216583 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/674c5f67-a17e-4462-bc2d-cca82e7f2e56-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "674c5f67-a17e-4462-bc2d-cca82e7f2e56" (UID: "674c5f67-a17e-4462-bc2d-cca82e7f2e56"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:23:06 crc kubenswrapper[4711]: I0123 08:23:06.216671 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/674c5f67-a17e-4462-bc2d-cca82e7f2e56-kube-api-access\") pod \"674c5f67-a17e-4462-bc2d-cca82e7f2e56\" (UID: \"674c5f67-a17e-4462-bc2d-cca82e7f2e56\") " Jan 23 08:23:06 crc kubenswrapper[4711]: I0123 08:23:06.216913 4711 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/674c5f67-a17e-4462-bc2d-cca82e7f2e56-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 23 08:23:06 crc kubenswrapper[4711]: I0123 08:23:06.235963 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/674c5f67-a17e-4462-bc2d-cca82e7f2e56-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "674c5f67-a17e-4462-bc2d-cca82e7f2e56" (UID: "674c5f67-a17e-4462-bc2d-cca82e7f2e56"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:23:06 crc kubenswrapper[4711]: I0123 08:23:06.319099 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/674c5f67-a17e-4462-bc2d-cca82e7f2e56-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 23 08:23:06 crc kubenswrapper[4711]: I0123 08:23:06.634979 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"674c5f67-a17e-4462-bc2d-cca82e7f2e56","Type":"ContainerDied","Data":"369ddcfa9194603c5c1826c259727c8ce317d14ba8575e06aab29f3d9ca31e9b"} Jan 23 08:23:06 crc kubenswrapper[4711]: I0123 08:23:06.635024 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="369ddcfa9194603c5c1826c259727c8ce317d14ba8575e06aab29f3d9ca31e9b" Jan 23 08:23:06 crc kubenswrapper[4711]: I0123 08:23:06.635156 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 23 08:23:08 crc kubenswrapper[4711]: I0123 08:23:08.219221 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:23:08 crc kubenswrapper[4711]: I0123 08:23:08.227072 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:23:08 crc kubenswrapper[4711]: I0123 08:23:08.456427 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-gxhxb" Jan 23 08:23:14 crc kubenswrapper[4711]: I0123 08:23:14.417741 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 23 08:23:19 crc kubenswrapper[4711]: I0123 08:23:19.789362 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:23:25 crc kubenswrapper[4711]: I0123 08:23:25.994294 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:23:25 crc kubenswrapper[4711]: I0123 08:23:25.994748 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:23:29 crc kubenswrapper[4711]: I0123 08:23:29.048483 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-nkrcc" Jan 23 08:23:37 crc kubenswrapper[4711]: I0123 08:23:37.307780 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 23 08:23:37 crc kubenswrapper[4711]: E0123 08:23:37.308348 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14fde908-f016-43a7-a521-70081a210846" containerName="pruner" Jan 23 08:23:37 crc kubenswrapper[4711]: I0123 08:23:37.308364 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="14fde908-f016-43a7-a521-70081a210846" containerName="pruner" Jan 23 08:23:37 crc kubenswrapper[4711]: E0123 08:23:37.308395 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="674c5f67-a17e-4462-bc2d-cca82e7f2e56" containerName="pruner" Jan 23 08:23:37 crc kubenswrapper[4711]: I0123 08:23:37.308404 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="674c5f67-a17e-4462-bc2d-cca82e7f2e56" containerName="pruner" Jan 23 08:23:37 crc kubenswrapper[4711]: E0123 08:23:37.308417 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1ab258d-ba2f-434d-8a15-2a08f24d03cb" containerName="collect-profiles" Jan 23 08:23:37 crc kubenswrapper[4711]: I0123 08:23:37.308426 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1ab258d-ba2f-434d-8a15-2a08f24d03cb" containerName="collect-profiles" Jan 23 08:23:37 crc kubenswrapper[4711]: I0123 08:23:37.308643 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1ab258d-ba2f-434d-8a15-2a08f24d03cb" containerName="collect-profiles" Jan 23 08:23:37 crc 
kubenswrapper[4711]: I0123 08:23:37.308664 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="14fde908-f016-43a7-a521-70081a210846" containerName="pruner" Jan 23 08:23:37 crc kubenswrapper[4711]: I0123 08:23:37.308678 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="674c5f67-a17e-4462-bc2d-cca82e7f2e56" containerName="pruner" Jan 23 08:23:37 crc kubenswrapper[4711]: I0123 08:23:37.309849 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 23 08:23:37 crc kubenswrapper[4711]: I0123 08:23:37.312522 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 23 08:23:37 crc kubenswrapper[4711]: I0123 08:23:37.313645 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 23 08:23:37 crc kubenswrapper[4711]: I0123 08:23:37.362776 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 23 08:23:37 crc kubenswrapper[4711]: I0123 08:23:37.390007 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 23 08:23:37 crc kubenswrapper[4711]: I0123 08:23:37.390405 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 23 08:23:37 crc kubenswrapper[4711]: I0123 08:23:37.492174 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 23 08:23:37 crc kubenswrapper[4711]: I0123 08:23:37.492242 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 23 08:23:37 crc kubenswrapper[4711]: I0123 08:23:37.492427 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 23 08:23:37 crc kubenswrapper[4711]: I0123 08:23:37.511819 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 23 08:23:37 crc kubenswrapper[4711]: I0123 08:23:37.637850 4711 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 23 08:23:42 crc kubenswrapper[4711]: I0123 08:23:42.905726 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 23 08:23:42 crc kubenswrapper[4711]: I0123 08:23:42.907177 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 23 08:23:42 crc kubenswrapper[4711]: I0123 08:23:42.922298 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 23 08:23:42 crc kubenswrapper[4711]: I0123 08:23:42.981779 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f2caef8b-b3d6-4982-a70a-576fbba7ceab-kube-api-access\") pod \"installer-9-crc\" (UID: \"f2caef8b-b3d6-4982-a70a-576fbba7ceab\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 23 08:23:42 crc kubenswrapper[4711]: I0123 08:23:42.981828 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f2caef8b-b3d6-4982-a70a-576fbba7ceab-var-lock\") pod \"installer-9-crc\" (UID: \"f2caef8b-b3d6-4982-a70a-576fbba7ceab\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 23 08:23:42 crc kubenswrapper[4711]: I0123 08:23:42.981874 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f2caef8b-b3d6-4982-a70a-576fbba7ceab-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f2caef8b-b3d6-4982-a70a-576fbba7ceab\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 23 08:23:43 crc kubenswrapper[4711]: I0123 08:23:43.082859 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f2caef8b-b3d6-4982-a70a-576fbba7ceab-kube-api-access\") pod \"installer-9-crc\" (UID: \"f2caef8b-b3d6-4982-a70a-576fbba7ceab\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 23 08:23:43 crc kubenswrapper[4711]: I0123 08:23:43.082918 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f2caef8b-b3d6-4982-a70a-576fbba7ceab-var-lock\") pod \"installer-9-crc\" (UID: \"f2caef8b-b3d6-4982-a70a-576fbba7ceab\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 23 08:23:43 crc kubenswrapper[4711]: I0123 08:23:43.082953 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f2caef8b-b3d6-4982-a70a-576fbba7ceab-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f2caef8b-b3d6-4982-a70a-576fbba7ceab\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 23 08:23:43 crc kubenswrapper[4711]: I0123 08:23:43.083059 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f2caef8b-b3d6-4982-a70a-576fbba7ceab-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f2caef8b-b3d6-4982-a70a-576fbba7ceab\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 23 08:23:43 crc kubenswrapper[4711]: I0123 08:23:43.083111 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f2caef8b-b3d6-4982-a70a-576fbba7ceab-var-lock\") pod \"installer-9-crc\" (UID: 
\"f2caef8b-b3d6-4982-a70a-576fbba7ceab\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 23 08:23:43 crc kubenswrapper[4711]: I0123 08:23:43.103783 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f2caef8b-b3d6-4982-a70a-576fbba7ceab-kube-api-access\") pod \"installer-9-crc\" (UID: \"f2caef8b-b3d6-4982-a70a-576fbba7ceab\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 23 08:23:43 crc kubenswrapper[4711]: I0123 08:23:43.227377 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 23 08:23:44 crc kubenswrapper[4711]: E0123 08:23:44.850804 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 23 08:23:44 crc kubenswrapper[4711]: E0123 08:23:44.851233 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f94vj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-pvd47_openshift-marketplace(de36fa9c-96b8-44d2-8a0d-384ea7ad1806): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 23 08:23:44 crc kubenswrapper[4711]: E0123 08:23:44.852561 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-pvd47" podUID="de36fa9c-96b8-44d2-8a0d-384ea7ad1806" Jan 23 08:23:46 crc kubenswrapper[4711]: E0123 08:23:46.110619 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image 
\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-pvd47" podUID="de36fa9c-96b8-44d2-8a0d-384ea7ad1806" Jan 23 08:23:46 crc kubenswrapper[4711]: E0123 08:23:46.195908 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 23 08:23:46 crc kubenswrapper[4711]: E0123 08:23:46.196064 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5xd88,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-q5bgt_openshift-marketplace(ac8209ab-0e14-4fed-9dcc-0978176748a1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 23 08:23:46 crc kubenswrapper[4711]: E0123 08:23:46.197257 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-q5bgt" podUID="ac8209ab-0e14-4fed-9dcc-0978176748a1" Jan 23 08:23:47 crc kubenswrapper[4711]: E0123 08:23:47.209721 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-q5bgt" podUID="ac8209ab-0e14-4fed-9dcc-0978176748a1" Jan 23 08:23:47 crc kubenswrapper[4711]: E0123 08:23:47.272438 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 23 08:23:47 crc kubenswrapper[4711]: E0123 08:23:47.272610 4711 
kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9w8v8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-7xzhz_openshift-marketplace(8f17fdf7-38ee-4c60-a0e9-e293cdd77830): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 23 08:23:47 crc kubenswrapper[4711]: E0123 08:23:47.273771 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-7xzhz" podUID="8f17fdf7-38ee-4c60-a0e9-e293cdd77830" Jan 23 08:23:50 crc kubenswrapper[4711]: E0123 08:23:50.748982 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-7xzhz" podUID="8f17fdf7-38ee-4c60-a0e9-e293cdd77830" Jan 23 08:23:50 crc kubenswrapper[4711]: E0123 08:23:50.839845 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 23 08:23:50 crc kubenswrapper[4711]: E0123 08:23:50.840029 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2c8fv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-885xw_openshift-marketplace(9ef23d02-cc65-4020-897b-3e114c07d801): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 23 08:23:50 crc kubenswrapper[4711]: E0123 08:23:50.841380 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-885xw" podUID="9ef23d02-cc65-4020-897b-3e114c07d801" Jan 23 08:23:50 crc kubenswrapper[4711]: E0123 08:23:50.862565 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 23 08:23:50 crc kubenswrapper[4711]: E0123 08:23:50.863433 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9jkvl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-qq59p_openshift-marketplace(0a16e14b-953f-491c-9986-b5bafcf8cd0b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 23 08:23:50 crc kubenswrapper[4711]: E0123 08:23:50.864742 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-qq59p" podUID="0a16e14b-953f-491c-9986-b5bafcf8cd0b" Jan 23 08:23:50 crc kubenswrapper[4711]: E0123 08:23:50.886601 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 23 08:23:50 crc kubenswrapper[4711]: E0123 08:23:50.886745 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-w8mls,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-shg2k_openshift-marketplace(303af9ef-3014-4b33-ba8e-f6b4a9227485): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 23 08:23:50 crc kubenswrapper[4711]: E0123 08:23:50.887970 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-shg2k" podUID="303af9ef-3014-4b33-ba8e-f6b4a9227485" Jan 23 08:23:50 crc kubenswrapper[4711]: E0123 08:23:50.899375 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 23 08:23:50 crc kubenswrapper[4711]: E0123 08:23:50.899597 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s5qdz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-9qd4n_openshift-marketplace(dc70bbe1-6f64-4501-b4cd-afd381a50e86): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 23 08:23:50 crc kubenswrapper[4711]: E0123 08:23:50.900909 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-9qd4n" podUID="dc70bbe1-6f64-4501-b4cd-afd381a50e86" Jan 23 08:23:50 crc kubenswrapper[4711]: E0123 08:23:50.935226 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 23 08:23:50 crc kubenswrapper[4711]: E0123 08:23:50.935359 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-thkcp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-kqptv_openshift-marketplace(3c8ded03-464c-4a85-8468-067607680129): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 23 08:23:50 crc kubenswrapper[4711]: E0123 08:23:50.936920 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-kqptv" podUID="3c8ded03-464c-4a85-8468-067607680129" Jan 23 08:23:50 crc kubenswrapper[4711]: I0123 08:23:50.943068 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 23 08:23:50 crc kubenswrapper[4711]: W0123 08:23:50.961144 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podf2caef8b_b3d6_4982_a70a_576fbba7ceab.slice/crio-ee99989ff007635adadda64b7debe47923e8f635277c4dbbe8accfbebdf030e7 WatchSource:0}: Error finding container ee99989ff007635adadda64b7debe47923e8f635277c4dbbe8accfbebdf030e7: Status 404 returned error can't find the container with id ee99989ff007635adadda64b7debe47923e8f635277c4dbbe8accfbebdf030e7 Jan 23 08:23:50 crc kubenswrapper[4711]: I0123 08:23:50.981382 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 23 08:23:51 crc kubenswrapper[4711]: I0123 08:23:51.694415 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f2caef8b-b3d6-4982-a70a-576fbba7ceab","Type":"ContainerStarted","Data":"a9fda206706e717e42a7f71e379d806c3b0aea9083d7d0f669e1cef1a9f75c7e"} Jan 23 08:23:51 crc kubenswrapper[4711]: I0123 08:23:51.694748 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f2caef8b-b3d6-4982-a70a-576fbba7ceab","Type":"ContainerStarted","Data":"ee99989ff007635adadda64b7debe47923e8f635277c4dbbe8accfbebdf030e7"} Jan 23 08:23:51 crc kubenswrapper[4711]: I0123 08:23:51.695482 4711 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776","Type":"ContainerStarted","Data":"c802f9af950060f61a6f82f3a27e109b2d03871f602846a39f554110b71f0cbd"} Jan 23 08:23:51 crc kubenswrapper[4711]: I0123 08:23:51.695524 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776","Type":"ContainerStarted","Data":"3e82d24370d1106b7619d924616ef369d299cf191516e341649a16f4d872f98a"} Jan 23 08:23:51 crc kubenswrapper[4711]: E0123 08:23:51.696757 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-kqptv" podUID="3c8ded03-464c-4a85-8468-067607680129" Jan 23 08:23:51 crc kubenswrapper[4711]: E0123 08:23:51.696968 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-9qd4n" podUID="dc70bbe1-6f64-4501-b4cd-afd381a50e86" Jan 23 08:23:51 crc kubenswrapper[4711]: E0123 08:23:51.696992 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-qq59p" podUID="0a16e14b-953f-491c-9986-b5bafcf8cd0b" Jan 23 08:23:51 crc kubenswrapper[4711]: E0123 08:23:51.697034 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-shg2k" podUID="303af9ef-3014-4b33-ba8e-f6b4a9227485" Jan 23 08:23:51 crc kubenswrapper[4711]: E0123 08:23:51.697668 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-885xw" podUID="9ef23d02-cc65-4020-897b-3e114c07d801" Jan 23 08:23:51 crc kubenswrapper[4711]: I0123 08:23:51.711550 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=9.711532208 podStartE2EDuration="9.711532208s" podCreationTimestamp="2026-01-23 08:23:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:23:51.706914862 +0000 UTC m=+217.279871230" watchObservedRunningTime="2026-01-23 08:23:51.711532208 +0000 UTC m=+217.284488576" Jan 23 08:23:51 crc kubenswrapper[4711]: I0123 08:23:51.807316 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=14.807292726 podStartE2EDuration="14.807292726s" podCreationTimestamp="2026-01-23 08:23:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:23:51.802427334 +0000 UTC m=+217.375383702" 
watchObservedRunningTime="2026-01-23 08:23:51.807292726 +0000 UTC m=+217.380249094"
Jan 23 08:23:52 crc kubenswrapper[4711]: I0123 08:23:52.702700 4711 generic.go:334] "Generic (PLEG): container finished" podID="1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776" containerID="c802f9af950060f61a6f82f3a27e109b2d03871f602846a39f554110b71f0cbd" exitCode=0
Jan 23 08:23:52 crc kubenswrapper[4711]: I0123 08:23:52.702839 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776","Type":"ContainerDied","Data":"c802f9af950060f61a6f82f3a27e109b2d03871f602846a39f554110b71f0cbd"}
Jan 23 08:23:53 crc kubenswrapper[4711]: I0123 08:23:53.893202 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Jan 23 08:23:53 crc kubenswrapper[4711]: I0123 08:23:53.948763 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776-kubelet-dir\") pod \"1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776\" (UID: \"1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776\") "
Jan 23 08:23:53 crc kubenswrapper[4711]: I0123 08:23:53.948882 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776-kube-api-access\") pod \"1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776\" (UID: \"1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776\") "
Jan 23 08:23:53 crc kubenswrapper[4711]: I0123 08:23:53.948921 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776" (UID: "1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 23 08:23:53 crc kubenswrapper[4711]: I0123 08:23:53.949277 4711 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776-kubelet-dir\") on node \"crc\" DevicePath \"\""
Jan 23 08:23:53 crc kubenswrapper[4711]: I0123 08:23:53.956863 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776" (UID: "1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:23:54 crc kubenswrapper[4711]: I0123 08:23:54.050097 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776-kube-api-access\") on node \"crc\" DevicePath \"\""
Jan 23 08:23:54 crc kubenswrapper[4711]: I0123 08:23:54.714048 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776","Type":"ContainerDied","Data":"3e82d24370d1106b7619d924616ef369d299cf191516e341649a16f4d872f98a"}
Jan 23 08:23:54 crc kubenswrapper[4711]: I0123 08:23:54.714311 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e82d24370d1106b7619d924616ef369d299cf191516e341649a16f4d872f98a"
Jan 23 08:23:54 crc kubenswrapper[4711]: I0123 08:23:54.714086 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Jan 23 08:23:55 crc kubenswrapper[4711]: I0123 08:23:55.993578 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 23 08:23:55 crc kubenswrapper[4711]: I0123 08:23:55.993634 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 23 08:23:55 crc kubenswrapper[4711]: I0123 08:23:55.993677 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8"
Jan 23 08:23:55 crc kubenswrapper[4711]: I0123 08:23:55.994194 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5"} pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 23 08:23:55 crc kubenswrapper[4711]: I0123 08:23:55.994276 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" containerID="cri-o://4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5" gracePeriod=600
Jan 23 08:23:56 crc kubenswrapper[4711]: I0123 08:23:56.726787 4711 generic.go:334] "Generic (PLEG): container finished" podID="3846d4e0-cfda-4e0b-8747-85267de12736" containerID="4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5" exitCode=0
Jan 23 08:23:56 crc kubenswrapper[4711]: I0123 08:23:56.726873 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerDied","Data":"4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5"}
Jan 23 08:23:57 crc kubenswrapper[4711]: I0123 08:23:57.737447 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerStarted","Data":"b064804566a711ebf557e11734763f83796197487bf136fb43efc8f895f896a6"}
Jan 23 08:23:58 crc kubenswrapper[4711]: I0123 08:23:58.748196 4711 generic.go:334] "Generic (PLEG): container finished" podID="de36fa9c-96b8-44d2-8a0d-384ea7ad1806" containerID="1d0cc74407b54dd309dad58b55007ccb5d79b6dc15a0d996dcc3bb80f67a0c71" exitCode=0
Jan 23 08:23:58 crc kubenswrapper[4711]: I0123 08:23:58.748544 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pvd47" event={"ID":"de36fa9c-96b8-44d2-8a0d-384ea7ad1806","Type":"ContainerDied","Data":"1d0cc74407b54dd309dad58b55007ccb5d79b6dc15a0d996dcc3bb80f67a0c71"}
Jan 23 08:23:59 crc kubenswrapper[4711]: I0123 08:23:59.759761 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pvd47" event={"ID":"de36fa9c-96b8-44d2-8a0d-384ea7ad1806","Type":"ContainerStarted","Data":"8af8a0fcc5a7a15e9069c80d60c58e3d16711fb0316bf13c6b727331ad0b9cd3"}
Jan 23 08:23:59 crc kubenswrapper[4711]: I0123 08:23:59.776637 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pvd47" podStartSLOduration=3.994541651 podStartE2EDuration="1m1.776619689s" podCreationTimestamp="2026-01-23 08:22:58 +0000 UTC" firstStartedPulling="2026-01-23 08:23:01.414235885 +0000 UTC m=+166.987192253" lastFinishedPulling="2026-01-23 08:23:59.196313923 +0000 UTC m=+224.769270291" observedRunningTime="2026-01-23 08:23:59.775615064 +0000 UTC m=+225.348571442" watchObservedRunningTime="2026-01-23 08:23:59.776619689 +0000 UTC m=+225.349576057"
Jan 23 08:24:03 crc kubenswrapper[4711]: I0123 08:24:03.782097 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q5bgt" event={"ID":"ac8209ab-0e14-4fed-9dcc-0978176748a1","Type":"ContainerStarted","Data":"42b79e0cc88ccf00ce44cd26a24897cbff7bf01cefb1fcd1afbe08dffd71f338"}
Jan 23 08:24:03 crc kubenswrapper[4711]: I0123 08:24:03.785011 4711 generic.go:334] "Generic (PLEG): container finished" podID="8f17fdf7-38ee-4c60-a0e9-e293cdd77830" containerID="9d810b44218c9d74c6c8be3341a2a9aac420c4826ace8be94e3b71d30a21e7d4" exitCode=0
Jan 23 08:24:03 crc kubenswrapper[4711]: I0123 08:24:03.785046 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7xzhz" event={"ID":"8f17fdf7-38ee-4c60-a0e9-e293cdd77830","Type":"ContainerDied","Data":"9d810b44218c9d74c6c8be3341a2a9aac420c4826ace8be94e3b71d30a21e7d4"}
Jan 23 08:24:04 crc kubenswrapper[4711]: I0123 08:24:04.791836 4711 generic.go:334] "Generic (PLEG): container finished" podID="ac8209ab-0e14-4fed-9dcc-0978176748a1" containerID="42b79e0cc88ccf00ce44cd26a24897cbff7bf01cefb1fcd1afbe08dffd71f338" exitCode=0
Jan 23 08:24:04 crc kubenswrapper[4711]: I0123 08:24:04.791964 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q5bgt" event={"ID":"ac8209ab-0e14-4fed-9dcc-0978176748a1","Type":"ContainerDied","Data":"42b79e0cc88ccf00ce44cd26a24897cbff7bf01cefb1fcd1afbe08dffd71f338"}
Jan 23 08:24:04 crc kubenswrapper[4711]: I0123 08:24:04.794751 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7xzhz" event={"ID":"8f17fdf7-38ee-4c60-a0e9-e293cdd77830","Type":"ContainerStarted","Data":"8ff61d4563602564033e609655da6c2bdeef004c75029401ac22e52ede6f9268"}
Jan 23 08:24:04 crc kubenswrapper[4711]: I0123 08:24:04.797141 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kqptv" event={"ID":"3c8ded03-464c-4a85-8468-067607680129","Type":"ContainerStarted","Data":"2f48a6b83df4f3dab6dc0ac340d77e6c996886e38bde3ceebbc3d65bc936e6a5"}
Jan 23 08:24:04 crc kubenswrapper[4711]: I0123 08:24:04.798768 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-885xw" event={"ID":"9ef23d02-cc65-4020-897b-3e114c07d801","Type":"ContainerStarted","Data":"63addd9321b55d26b9624e91027963f213c3228268dff7adf9855543e5230989"}
Jan 23 08:24:04 crc kubenswrapper[4711]: I0123 08:24:04.800655 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shg2k" event={"ID":"303af9ef-3014-4b33-ba8e-f6b4a9227485","Type":"ContainerStarted","Data":"755633f637756d4a7d83d5c5bd89f565cdb0955e3c2dc1326c8ffcf6507fc381"}
Jan 23 08:24:04 crc kubenswrapper[4711]: I0123 08:24:04.828852 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7xzhz" podStartSLOduration=3.947868352 podStartE2EDuration="1m4.828835207s" podCreationTimestamp="2026-01-23 08:23:00 +0000 UTC" firstStartedPulling="2026-01-23 08:23:03.501081824 +0000 UTC m=+169.074038202" lastFinishedPulling="2026-01-23 08:24:04.382048689 +0000 UTC m=+229.955005057" observedRunningTime="2026-01-23 08:24:04.823275149 +0000 UTC m=+230.396231517" watchObservedRunningTime="2026-01-23 08:24:04.828835207 +0000 UTC m=+230.401791575"
Jan 23 08:24:05 crc kubenswrapper[4711]: I0123 08:24:05.807382 4711 generic.go:334] "Generic (PLEG): container finished" podID="3c8ded03-464c-4a85-8468-067607680129" containerID="2f48a6b83df4f3dab6dc0ac340d77e6c996886e38bde3ceebbc3d65bc936e6a5" exitCode=0
Jan 23 08:24:05 crc kubenswrapper[4711]: I0123 08:24:05.807452 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kqptv" event={"ID":"3c8ded03-464c-4a85-8468-067607680129","Type":"ContainerDied","Data":"2f48a6b83df4f3dab6dc0ac340d77e6c996886e38bde3ceebbc3d65bc936e6a5"}
Jan 23 08:24:05 crc kubenswrapper[4711]: I0123 08:24:05.809331 4711 generic.go:334] "Generic (PLEG): container finished" podID="9ef23d02-cc65-4020-897b-3e114c07d801" containerID="63addd9321b55d26b9624e91027963f213c3228268dff7adf9855543e5230989" exitCode=0
Jan 23 08:24:05 crc kubenswrapper[4711]: I0123 08:24:05.809381 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-885xw" event={"ID":"9ef23d02-cc65-4020-897b-3e114c07d801","Type":"ContainerDied","Data":"63addd9321b55d26b9624e91027963f213c3228268dff7adf9855543e5230989"}
Jan 23 08:24:05 crc kubenswrapper[4711]: I0123 08:24:05.816387 4711 generic.go:334] "Generic (PLEG): container finished" podID="303af9ef-3014-4b33-ba8e-f6b4a9227485" containerID="755633f637756d4a7d83d5c5bd89f565cdb0955e3c2dc1326c8ffcf6507fc381" exitCode=0
Jan 23 08:24:05 crc kubenswrapper[4711]: I0123 08:24:05.816431 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shg2k" event={"ID":"303af9ef-3014-4b33-ba8e-f6b4a9227485","Type":"ContainerDied","Data":"755633f637756d4a7d83d5c5bd89f565cdb0955e3c2dc1326c8ffcf6507fc381"}
Jan 23 08:24:09 crc kubenswrapper[4711]: I0123 08:24:09.127702 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pvd47"
Jan 23 08:24:09 crc kubenswrapper[4711]: I0123 08:24:09.128247 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-pvd47"
Jan 23 08:24:09 crc kubenswrapper[4711]: I0123 08:24:09.565025 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pvd47"
Jan 23 08:24:09 crc kubenswrapper[4711]: I0123 08:24:09.883920 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pvd47"
Jan 23 08:24:10 crc kubenswrapper[4711]: I0123 08:24:10.635646 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7xzhz"
Jan 23 08:24:10 crc kubenswrapper[4711]: I0123 08:24:10.636461 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7xzhz"
Jan 23 08:24:10 crc kubenswrapper[4711]: I0123 08:24:10.688450 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7xzhz"
Jan 23 08:24:10 crc kubenswrapper[4711]: I0123 08:24:10.721421 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pvd47"]
Jan 23 08:24:10 crc kubenswrapper[4711]: I0123 08:24:10.889289 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7xzhz"
Jan 23 08:24:11 crc kubenswrapper[4711]: I0123 08:24:11.853275 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-pvd47" podUID="de36fa9c-96b8-44d2-8a0d-384ea7ad1806" containerName="registry-server" containerID="cri-o://8af8a0fcc5a7a15e9069c80d60c58e3d16711fb0316bf13c6b727331ad0b9cd3" gracePeriod=2
Jan 23 08:24:12 crc kubenswrapper[4711]: I0123 08:24:12.859358 4711 generic.go:334] "Generic (PLEG): container finished" podID="de36fa9c-96b8-44d2-8a0d-384ea7ad1806" containerID="8af8a0fcc5a7a15e9069c80d60c58e3d16711fb0316bf13c6b727331ad0b9cd3" exitCode=0
Jan 23 08:24:12 crc kubenswrapper[4711]: I0123 08:24:12.859382 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pvd47" event={"ID":"de36fa9c-96b8-44d2-8a0d-384ea7ad1806","Type":"ContainerDied","Data":"8af8a0fcc5a7a15e9069c80d60c58e3d16711fb0316bf13c6b727331ad0b9cd3"}
Jan 23 08:24:14 crc kubenswrapper[4711]: I0123 08:24:14.837960 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pvd47"
Jan 23 08:24:14 crc kubenswrapper[4711]: I0123 08:24:14.870849 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pvd47" event={"ID":"de36fa9c-96b8-44d2-8a0d-384ea7ad1806","Type":"ContainerDied","Data":"8d76494513662d7f86375fa5ca2c8ba7f7c46f4a90df94213c5ed60252eded5e"}
Jan 23 08:24:14 crc kubenswrapper[4711]: I0123 08:24:14.870901 4711 scope.go:117] "RemoveContainer" containerID="8af8a0fcc5a7a15e9069c80d60c58e3d16711fb0316bf13c6b727331ad0b9cd3"
Jan 23 08:24:14 crc kubenswrapper[4711]: I0123 08:24:14.870908 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pvd47"
Jan 23 08:24:15 crc kubenswrapper[4711]: I0123 08:24:15.015680 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de36fa9c-96b8-44d2-8a0d-384ea7ad1806-catalog-content\") pod \"de36fa9c-96b8-44d2-8a0d-384ea7ad1806\" (UID: \"de36fa9c-96b8-44d2-8a0d-384ea7ad1806\") "
Jan 23 08:24:15 crc kubenswrapper[4711]: I0123 08:24:15.016074 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f94vj\" (UniqueName: \"kubernetes.io/projected/de36fa9c-96b8-44d2-8a0d-384ea7ad1806-kube-api-access-f94vj\") pod \"de36fa9c-96b8-44d2-8a0d-384ea7ad1806\" (UID: \"de36fa9c-96b8-44d2-8a0d-384ea7ad1806\") "
Jan 23 08:24:15 crc kubenswrapper[4711]: I0123 08:24:15.016117 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de36fa9c-96b8-44d2-8a0d-384ea7ad1806-utilities\") pod \"de36fa9c-96b8-44d2-8a0d-384ea7ad1806\" (UID: \"de36fa9c-96b8-44d2-8a0d-384ea7ad1806\") "
Jan 23 08:24:15 crc kubenswrapper[4711]: I0123 08:24:15.017019 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de36fa9c-96b8-44d2-8a0d-384ea7ad1806-utilities" (OuterVolumeSpecName: "utilities") pod "de36fa9c-96b8-44d2-8a0d-384ea7ad1806" (UID: "de36fa9c-96b8-44d2-8a0d-384ea7ad1806"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:24:15 crc kubenswrapper[4711]: I0123 08:24:15.021480 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de36fa9c-96b8-44d2-8a0d-384ea7ad1806-kube-api-access-f94vj" (OuterVolumeSpecName: "kube-api-access-f94vj") pod "de36fa9c-96b8-44d2-8a0d-384ea7ad1806" (UID: "de36fa9c-96b8-44d2-8a0d-384ea7ad1806"). InnerVolumeSpecName "kube-api-access-f94vj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:24:15 crc kubenswrapper[4711]: I0123 08:24:15.060986 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de36fa9c-96b8-44d2-8a0d-384ea7ad1806-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "de36fa9c-96b8-44d2-8a0d-384ea7ad1806" (UID: "de36fa9c-96b8-44d2-8a0d-384ea7ad1806"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:24:15 crc kubenswrapper[4711]: I0123 08:24:15.117486 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de36fa9c-96b8-44d2-8a0d-384ea7ad1806-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 23 08:24:15 crc kubenswrapper[4711]: I0123 08:24:15.117542 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f94vj\" (UniqueName: \"kubernetes.io/projected/de36fa9c-96b8-44d2-8a0d-384ea7ad1806-kube-api-access-f94vj\") on node \"crc\" DevicePath \"\""
Jan 23 08:24:15 crc kubenswrapper[4711]: I0123 08:24:15.117556 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de36fa9c-96b8-44d2-8a0d-384ea7ad1806-utilities\") on node \"crc\" DevicePath \"\""
Jan 23 08:24:15 crc kubenswrapper[4711]: I0123 08:24:15.201662 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pvd47"]
Jan 23 08:24:15 crc kubenswrapper[4711]: I0123 08:24:15.205389 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-pvd47"]
Jan 23 08:24:15 crc kubenswrapper[4711]: I0123 08:24:15.481945 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de36fa9c-96b8-44d2-8a0d-384ea7ad1806" path="/var/lib/kubelet/pods/de36fa9c-96b8-44d2-8a0d-384ea7ad1806/volumes"
Jan 23 08:24:17 crc kubenswrapper[4711]: I0123 08:24:17.854023 4711 scope.go:117] "RemoveContainer" containerID="1d0cc74407b54dd309dad58b55007ccb5d79b6dc15a0d996dcc3bb80f67a0c71"
Jan 23 08:24:18 crc kubenswrapper[4711]: I0123 08:24:18.320713 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-q5hjq"]
Jan 23 08:24:22 crc kubenswrapper[4711]: I0123 08:24:22.131857 4711 scope.go:117] "RemoveContainer" containerID="c12b1e33c25985533db1ac3a3ccd0bf897f86d5e5ffc0a2d3d9e45fcbbe4e442"
Jan 23 08:24:22 crc kubenswrapper[4711]: I0123 08:24:22.919177 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kqptv" event={"ID":"3c8ded03-464c-4a85-8468-067607680129","Type":"ContainerStarted","Data":"dbe9cb549bee3b6070e10ca9a47cc03f0babb2de19acd8068614105f6fa7154a"}
Jan 23 08:24:22 crc kubenswrapper[4711]: I0123 08:24:22.921779 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-885xw" event={"ID":"9ef23d02-cc65-4020-897b-3e114c07d801","Type":"ContainerStarted","Data":"7ed25de54c7ac3f30e02bac19b14adfa13f4b548b199038259df1e4936d6aec3"}
Jan 23 08:24:22 crc kubenswrapper[4711]: I0123 08:24:22.923870 4711 generic.go:334] "Generic (PLEG): container finished" podID="dc70bbe1-6f64-4501-b4cd-afd381a50e86" containerID="48810ebd2ea1e79769533976b8c4270dfc2a1a28a0d87e7ede8f271770701042" exitCode=0
Jan 23 08:24:22 crc kubenswrapper[4711]: I0123 08:24:22.923930 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9qd4n" event={"ID":"dc70bbe1-6f64-4501-b4cd-afd381a50e86","Type":"ContainerDied","Data":"48810ebd2ea1e79769533976b8c4270dfc2a1a28a0d87e7ede8f271770701042"}
Jan 23 08:24:22 crc kubenswrapper[4711]: I0123 08:24:22.927979 4711 generic.go:334] "Generic (PLEG): container finished" podID="0a16e14b-953f-491c-9986-b5bafcf8cd0b" containerID="afa046fdb95761c8923dbf7af7860bf7b9452121fc3869e6b254dc5b78feadd4" exitCode=0
Jan 23 08:24:22 crc kubenswrapper[4711]: I0123 08:24:22.928053 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qq59p" event={"ID":"0a16e14b-953f-491c-9986-b5bafcf8cd0b","Type":"ContainerDied","Data":"afa046fdb95761c8923dbf7af7860bf7b9452121fc3869e6b254dc5b78feadd4"}
Jan 23 08:24:22 crc kubenswrapper[4711]: I0123 08:24:22.937891 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shg2k" event={"ID":"303af9ef-3014-4b33-ba8e-f6b4a9227485","Type":"ContainerStarted","Data":"6bf029d6e69febaae87ae3a440c7a5630dd2120cfbd6f99db0fe2f896ee02e4b"}
Jan 23 08:24:22 crc kubenswrapper[4711]: I0123 08:24:22.943277 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q5bgt" event={"ID":"ac8209ab-0e14-4fed-9dcc-0978176748a1","Type":"ContainerStarted","Data":"d201da90ae40402dfe94d1564ffb7b432cfed3f65f708eb302d38104df07574a"}
Jan 23 08:24:22 crc kubenswrapper[4711]: I0123 08:24:22.952189 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kqptv" podStartSLOduration=10.761982941 podStartE2EDuration="1m21.952171625s" podCreationTimestamp="2026-01-23 08:23:01 +0000 UTC" firstStartedPulling="2026-01-23 08:23:03.504667175 +0000 UTC m=+169.077623543" lastFinishedPulling="2026-01-23 08:24:14.694855859 +0000 UTC m=+240.267812227" observedRunningTime="2026-01-23 08:24:22.947882467 +0000 UTC m=+248.520838855" watchObservedRunningTime="2026-01-23 08:24:22.952171625 +0000 UTC m=+248.525127993"
Jan 23 08:24:22 crc kubenswrapper[4711]: I0123 08:24:22.969766 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-q5bgt" podStartSLOduration=6.320292543 podStartE2EDuration="1m24.969746937s" podCreationTimestamp="2026-01-23 08:22:58 +0000 UTC" firstStartedPulling="2026-01-23 08:23:00.366811621 +0000 UTC m=+165.939767989" lastFinishedPulling="2026-01-23 08:24:19.016266015 +0000 UTC m=+244.589222383" observedRunningTime="2026-01-23 08:24:22.967196953 +0000 UTC m=+248.540153321" watchObservedRunningTime="2026-01-23 08:24:22.969746937 +0000 UTC m=+248.542703305"
Jan 23 08:24:22 crc kubenswrapper[4711]: I0123 08:24:22.991900 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-shg2k" podStartSLOduration=6.796521911 podStartE2EDuration="1m24.991883614s" podCreationTimestamp="2026-01-23 08:22:58 +0000 UTC" firstStartedPulling="2026-01-23 08:23:00.362740628 +0000 UTC m=+165.935696996" lastFinishedPulling="2026-01-23 08:24:18.558102331 +0000 UTC m=+244.131058699" observedRunningTime="2026-01-23 08:24:22.988628722 +0000 UTC m=+248.561585100" watchObservedRunningTime="2026-01-23 08:24:22.991883614 +0000 UTC m=+248.564839982"
Jan 23 08:24:23 crc kubenswrapper[4711]: I0123 08:24:23.015670 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-885xw" podStartSLOduration=9.649005945 podStartE2EDuration="1m22.015654602s" podCreationTimestamp="2026-01-23 08:23:01 +0000 UTC" firstStartedPulling="2026-01-23 08:23:03.525763884 +0000 UTC m=+169.098720252" lastFinishedPulling="2026-01-23 08:24:15.892412541 +0000 UTC m=+241.465368909" observedRunningTime="2026-01-23 08:24:23.012435271 +0000 UTC m=+248.585391639" watchObservedRunningTime="2026-01-23 08:24:23.015654602 +0000 UTC m=+248.588610970"
Jan 23 08:24:24 crc kubenswrapper[4711]: I0123 08:24:24.956620 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9qd4n" event={"ID":"dc70bbe1-6f64-4501-b4cd-afd381a50e86","Type":"ContainerStarted","Data":"da240b8885ee249428d29acbcca2a09d0ef6adeb5bedbf84c08e6707aecb91d1"}
pod="openshift-marketplace/certified-operators-9qd4n" event={"ID":"dc70bbe1-6f64-4501-b4cd-afd381a50e86","Type":"ContainerStarted","Data":"da240b8885ee249428d29acbcca2a09d0ef6adeb5bedbf84c08e6707aecb91d1"} Jan 23 08:24:24 crc kubenswrapper[4711]: I0123 08:24:24.958658 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qq59p" event={"ID":"0a16e14b-953f-491c-9986-b5bafcf8cd0b","Type":"ContainerStarted","Data":"aafaec601ef4340da071c13ae3b0d6eb58e8c9a27b55af2de7dcf0248244fccf"} Jan 23 08:24:24 crc kubenswrapper[4711]: I0123 08:24:24.985182 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9qd4n" podStartSLOduration=3.945930238 podStartE2EDuration="1m26.985159291s" podCreationTimestamp="2026-01-23 08:22:58 +0000 UTC" firstStartedPulling="2026-01-23 08:23:00.387882499 +0000 UTC m=+165.960838867" lastFinishedPulling="2026-01-23 08:24:23.427111532 +0000 UTC m=+249.000067920" observedRunningTime="2026-01-23 08:24:24.980250188 +0000 UTC m=+250.553206556" watchObservedRunningTime="2026-01-23 08:24:24.985159291 +0000 UTC m=+250.558115659" Jan 23 08:24:25 crc kubenswrapper[4711]: I0123 08:24:25.013345 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qq59p" podStartSLOduration=5.119443753 podStartE2EDuration="1m25.01332694s" podCreationTimestamp="2026-01-23 08:23:00 +0000 UTC" firstStartedPulling="2026-01-23 08:23:03.487681709 +0000 UTC m=+169.060638077" lastFinishedPulling="2026-01-23 08:24:23.381564896 +0000 UTC m=+248.954521264" observedRunningTime="2026-01-23 08:24:25.011496133 +0000 UTC m=+250.584452501" watchObservedRunningTime="2026-01-23 08:24:25.01332694 +0000 UTC m=+250.586283308" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.422359 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-shg2k" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.423517 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-shg2k" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.463094 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-shg2k" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.649177 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9qd4n" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.649284 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9qd4n" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.687080 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9qd4n" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.825157 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-q5bgt" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.825212 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-q5bgt" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.853094 4711 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 23 08:24:28 crc kubenswrapper[4711]: E0123 
08:24:28.853333 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de36fa9c-96b8-44d2-8a0d-384ea7ad1806" containerName="registry-server" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.853345 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="de36fa9c-96b8-44d2-8a0d-384ea7ad1806" containerName="registry-server" Jan 23 08:24:28 crc kubenswrapper[4711]: E0123 08:24:28.853358 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776" containerName="pruner" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.853364 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776" containerName="pruner" Jan 23 08:24:28 crc kubenswrapper[4711]: E0123 08:24:28.853374 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de36fa9c-96b8-44d2-8a0d-384ea7ad1806" containerName="extract-content" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.853382 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="de36fa9c-96b8-44d2-8a0d-384ea7ad1806" containerName="extract-content" Jan 23 08:24:28 crc kubenswrapper[4711]: E0123 08:24:28.853394 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de36fa9c-96b8-44d2-8a0d-384ea7ad1806" containerName="extract-utilities" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.853401 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="de36fa9c-96b8-44d2-8a0d-384ea7ad1806" containerName="extract-utilities" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.853489 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="de36fa9c-96b8-44d2-8a0d-384ea7ad1806" containerName="registry-server" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.853520 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fb2dabe-ac0f-4c5a-be5f-e2aceaab5776" containerName="pruner" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.853828 4711 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.854078 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca" gracePeriod=15 Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.854239 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.854576 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447" gracePeriod=15 Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.854625 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de" gracePeriod=15 Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.854658 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801" gracePeriod=15 Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.854747 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5" gracePeriod=15 Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.856142 4711 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 23 08:24:28 crc kubenswrapper[4711]: E0123 08:24:28.856413 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.856435 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 23 08:24:28 crc kubenswrapper[4711]: E0123 08:24:28.856448 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.856456 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 23 08:24:28 crc kubenswrapper[4711]: E0123 08:24:28.856468 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.856476 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 23 08:24:28 crc kubenswrapper[4711]: E0123 08:24:28.856493 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.856516 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 23 08:24:28 crc kubenswrapper[4711]: E0123 08:24:28.856530 4711 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.856538 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 23 08:24:28 crc kubenswrapper[4711]: E0123 08:24:28.856551 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.856558 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 23 08:24:28 crc kubenswrapper[4711]: E0123 08:24:28.856570 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.856579 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.856709 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.856731 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.856742 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.856750 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.856765 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.856776 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.868256 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-q5bgt" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.906803 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.906855 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.906890 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.906953 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.906983 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.907011 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.907033 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:28 crc kubenswrapper[4711]: I0123 08:24:28.907087 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.010103 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.010404 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.010458 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.010497 4711 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.010537 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.010556 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.010591 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.010608 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.010655 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.010229 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.010700 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.011183 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.011465 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.011492 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.011527 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.011754 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.023465 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-q5bgt" Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.029103 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-shg2k" Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.991916 4711 generic.go:334] "Generic (PLEG): container finished" podID="f2caef8b-b3d6-4982-a70a-576fbba7ceab" containerID="a9fda206706e717e42a7f71e379d806c3b0aea9083d7d0f669e1cef1a9f75c7e" exitCode=0 Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.992034 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f2caef8b-b3d6-4982-a70a-576fbba7ceab","Type":"ContainerDied","Data":"a9fda206706e717e42a7f71e379d806c3b0aea9083d7d0f669e1cef1a9f75c7e"} Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.995210 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.997151 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.998091 4711 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447" exitCode=0 Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.998126 4711 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de" exitCode=0 Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.998137 4711 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801" exitCode=0 Jan 23 
08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.998148 4711 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5" exitCode=2 Jan 23 08:24:29 crc kubenswrapper[4711]: I0123 08:24:29.998408 4711 scope.go:117] "RemoveContainer" containerID="cc4fb6678f77ff8f17e63e4179405d9205d7a290955766a158fb7f1c4980b49f" Jan 23 08:24:30 crc kubenswrapper[4711]: I0123 08:24:30.984887 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qq59p" Jan 23 08:24:30 crc kubenswrapper[4711]: I0123 08:24:30.986543 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qq59p" Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.016238 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.043961 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qq59p" Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.253599 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.254877 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.257611 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.342615 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.342681 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f2caef8b-b3d6-4982-a70a-576fbba7ceab-var-lock\") pod \"f2caef8b-b3d6-4982-a70a-576fbba7ceab\" (UID: \"f2caef8b-b3d6-4982-a70a-576fbba7ceab\") " Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.342714 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.342766 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.342799 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f2caef8b-b3d6-4982-a70a-576fbba7ceab-kube-api-access\") pod \"f2caef8b-b3d6-4982-a70a-576fbba7ceab\" (UID: \"f2caef8b-b3d6-4982-a70a-576fbba7ceab\") " Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.342846 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f2caef8b-b3d6-4982-a70a-576fbba7ceab-kubelet-dir\") pod \"f2caef8b-b3d6-4982-a70a-576fbba7ceab\" (UID: \"f2caef8b-b3d6-4982-a70a-576fbba7ceab\") " Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.343166 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f2caef8b-b3d6-4982-a70a-576fbba7ceab-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f2caef8b-b3d6-4982-a70a-576fbba7ceab" (UID: "f2caef8b-b3d6-4982-a70a-576fbba7ceab"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.343215 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.343234 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f2caef8b-b3d6-4982-a70a-576fbba7ceab-var-lock" (OuterVolumeSpecName: "var-lock") pod "f2caef8b-b3d6-4982-a70a-576fbba7ceab" (UID: "f2caef8b-b3d6-4982-a70a-576fbba7ceab"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.343254 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.343274 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.349945 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2caef8b-b3d6-4982-a70a-576fbba7ceab-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f2caef8b-b3d6-4982-a70a-576fbba7ceab" (UID: "f2caef8b-b3d6-4982-a70a-576fbba7ceab"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.445740 4711 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.445780 4711 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f2caef8b-b3d6-4982-a70a-576fbba7ceab-var-lock\") on node \"crc\" DevicePath \"\"" Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.445794 4711 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.445804 4711 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.445814 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f2caef8b-b3d6-4982-a70a-576fbba7ceab-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.445827 4711 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f2caef8b-b3d6-4982-a70a-576fbba7ceab-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.482602 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.803714 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-885xw" Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 08:24:31.803778 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-885xw" Jan 23 08:24:31 crc kubenswrapper[4711]: I0123 
08:24:31.842084 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-885xw" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.025236 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.025254 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f2caef8b-b3d6-4982-a70a-576fbba7ceab","Type":"ContainerDied","Data":"ee99989ff007635adadda64b7debe47923e8f635277c4dbbe8accfbebdf030e7"} Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.025287 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ee99989ff007635adadda64b7debe47923e8f635277c4dbbe8accfbebdf030e7" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.033357 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.034758 4711 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca" exitCode=0 Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.035494 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.035852 4711 scope.go:117] "RemoveContainer" containerID="3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.056474 4711 scope.go:117] "RemoveContainer" containerID="d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.071633 4711 scope.go:117] "RemoveContainer" containerID="1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.082942 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qq59p" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.089129 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-885xw" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.092574 4711 scope.go:117] "RemoveContainer" containerID="bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.109567 4711 scope.go:117] "RemoveContainer" containerID="9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.126081 4711 scope.go:117] "RemoveContainer" containerID="f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.150318 4711 scope.go:117] "RemoveContainer" containerID="3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447" Jan 23 08:24:32 crc kubenswrapper[4711]: E0123 08:24:32.150833 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\": container with ID starting with 3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447 not found: ID 
does not exist" containerID="3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.150869 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447"} err="failed to get container status \"3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\": rpc error: code = NotFound desc = could not find container \"3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447\": container with ID starting with 3c08df2c9eec39cf75e46bf2118ea0492960322004746b5aa60ed0c15a279447 not found: ID does not exist" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.150906 4711 scope.go:117] "RemoveContainer" containerID="d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de" Jan 23 08:24:32 crc kubenswrapper[4711]: E0123 08:24:32.151334 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\": container with ID starting with d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de not found: ID does not exist" containerID="d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.151378 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de"} err="failed to get container status \"d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\": rpc error: code = NotFound desc = could not find container \"d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de\": container with ID starting with d8ce38061b1a661f4d9d28c0b85c7e41888a5c6ac08893b864d372cf5caeb3de not found: ID does not exist" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.151393 4711 scope.go:117] "RemoveContainer" containerID="1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801" Jan 23 08:24:32 crc kubenswrapper[4711]: E0123 08:24:32.151828 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\": container with ID starting with 1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801 not found: ID does not exist" containerID="1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.151880 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801"} err="failed to get container status \"1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\": rpc error: code = NotFound desc = could not find container \"1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801\": container with ID starting with 1c3b264c90e37672b68ea0e2d96e67d84b34683bbcaa81b9023c2ca71941c801 not found: ID does not exist" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.151900 4711 scope.go:117] "RemoveContainer" containerID="bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5" Jan 23 08:24:32 crc kubenswrapper[4711]: E0123 08:24:32.152135 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\": container with ID starting with bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5 not found: ID does not exist" containerID="bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.152165 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5"} err="failed to get container status \"bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\": rpc error: code = NotFound desc = could not find container \"bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5\": container with ID starting with bee112f5cf970c082ba59e54905de9ec71fffeaec64c97d6587dcb48bf9da2d5 not found: ID does not exist" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.152182 4711 scope.go:117] "RemoveContainer" containerID="9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca" Jan 23 08:24:32 crc kubenswrapper[4711]: E0123 08:24:32.152928 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\": container with ID starting with 9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca not found: ID does not exist" containerID="9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.152974 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca"} err="failed to get container status \"9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\": rpc error: code = NotFound desc = could not find container \"9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca\": container with ID starting with 9546808d3ed03d34e5e555b62cb961d2e1c16c0833018d9050192d923f08ccca not found: ID does not exist" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.152991 4711 scope.go:117] "RemoveContainer" containerID="f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99" Jan 23 08:24:32 crc kubenswrapper[4711]: E0123 08:24:32.154299 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\": container with ID starting with f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99 not found: ID does not exist" containerID="f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.154338 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99"} err="failed to get container status \"f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\": rpc error: code = NotFound desc = could not find container \"f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99\": container with ID starting with f8cc9c7e776f0071e77de3b71cb0bfae7c42ab5f60488237364039d6e24b1e99 not found: ID does not exist" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.168876 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kqptv" Jan 23 08:24:32 crc 
kubenswrapper[4711]: I0123 08:24:32.169044 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kqptv" Jan 23 08:24:32 crc kubenswrapper[4711]: I0123 08:24:32.212900 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kqptv" Jan 23 08:24:33 crc kubenswrapper[4711]: I0123 08:24:33.103800 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kqptv" Jan 23 08:24:33 crc kubenswrapper[4711]: E0123 08:24:33.554081 4711 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.220:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" volumeName="registry-storage" Jan 23 08:24:33 crc kubenswrapper[4711]: E0123 08:24:33.880533 4711 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.220:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:24:33 crc kubenswrapper[4711]: I0123 08:24:33.881095 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:24:33 crc kubenswrapper[4711]: I0123 08:24:33.896228 4711 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:33 crc kubenswrapper[4711]: I0123 08:24:33.896547 4711 status_manager.go:851] "Failed to get status for pod" podUID="0a16e14b-953f-491c-9986-b5bafcf8cd0b" pod="openshift-marketplace/redhat-marketplace-qq59p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-qq59p\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:33 crc kubenswrapper[4711]: I0123 08:24:33.896753 4711 status_manager.go:851] "Failed to get status for pod" podUID="f2caef8b-b3d6-4982-a70a-576fbba7ceab" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:33 crc kubenswrapper[4711]: I0123 08:24:33.896908 4711 status_manager.go:851] "Failed to get status for pod" podUID="ac8209ab-0e14-4fed-9dcc-0978176748a1" pod="openshift-marketplace/community-operators-q5bgt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-q5bgt\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:33 crc kubenswrapper[4711]: I0123 08:24:33.897066 4711 status_manager.go:851] "Failed to get status for pod" podUID="3c8ded03-464c-4a85-8468-067607680129" pod="openshift-marketplace/redhat-operators-kqptv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-kqptv\": dial tcp 38.102.83.220:6443: 
connect: connection refused" Jan 23 08:24:33 crc kubenswrapper[4711]: I0123 08:24:33.897196 4711 status_manager.go:851] "Failed to get status for pod" podUID="303af9ef-3014-4b33-ba8e-f6b4a9227485" pod="openshift-marketplace/community-operators-shg2k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-shg2k\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:33 crc kubenswrapper[4711]: I0123 08:24:33.897376 4711 status_manager.go:851] "Failed to get status for pod" podUID="9ef23d02-cc65-4020-897b-3e114c07d801" pod="openshift-marketplace/redhat-operators-885xw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-885xw\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:33 crc kubenswrapper[4711]: W0123 08:24:33.901003 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-07a45a1dadf82649955292b3ae4365ea6e3109c9f0cae2c705a744bc69d92012 WatchSource:0}: Error finding container 07a45a1dadf82649955292b3ae4365ea6e3109c9f0cae2c705a744bc69d92012: Status 404 returned error can't find the container with id 07a45a1dadf82649955292b3ae4365ea6e3109c9f0cae2c705a744bc69d92012 Jan 23 08:24:33 crc kubenswrapper[4711]: E0123 08:24:33.903734 4711 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.220:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188d4e9f1c0496a6 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-23 08:24:33.90330231 +0000 UTC m=+259.476258678,LastTimestamp:2026-01-23 08:24:33.90330231 +0000 UTC m=+259.476258678,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 23 08:24:34 crc kubenswrapper[4711]: I0123 08:24:34.071793 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"07a45a1dadf82649955292b3ae4365ea6e3109c9f0cae2c705a744bc69d92012"} Jan 23 08:24:35 crc kubenswrapper[4711]: I0123 08:24:35.077637 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"32f33ea11baca093aa2bc580070509d1e686b817bf475debed793dacd53e886b"} Jan 23 08:24:35 crc kubenswrapper[4711]: I0123 08:24:35.078278 4711 status_manager.go:851] "Failed to get status for pod" podUID="f2caef8b-b3d6-4982-a70a-576fbba7ceab" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": 
dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:35 crc kubenswrapper[4711]: I0123 08:24:35.078644 4711 status_manager.go:851] "Failed to get status for pod" podUID="0a16e14b-953f-491c-9986-b5bafcf8cd0b" pod="openshift-marketplace/redhat-marketplace-qq59p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-qq59p\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:35 crc kubenswrapper[4711]: I0123 08:24:35.078860 4711 status_manager.go:851] "Failed to get status for pod" podUID="ac8209ab-0e14-4fed-9dcc-0978176748a1" pod="openshift-marketplace/community-operators-q5bgt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-q5bgt\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:35 crc kubenswrapper[4711]: I0123 08:24:35.079087 4711 status_manager.go:851] "Failed to get status for pod" podUID="3c8ded03-464c-4a85-8468-067607680129" pod="openshift-marketplace/redhat-operators-kqptv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-kqptv\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:35 crc kubenswrapper[4711]: I0123 08:24:35.079318 4711 status_manager.go:851] "Failed to get status for pod" podUID="303af9ef-3014-4b33-ba8e-f6b4a9227485" pod="openshift-marketplace/community-operators-shg2k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-shg2k\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:35 crc kubenswrapper[4711]: I0123 08:24:35.079525 4711 status_manager.go:851] "Failed to get status for pod" podUID="9ef23d02-cc65-4020-897b-3e114c07d801" pod="openshift-marketplace/redhat-operators-885xw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-885xw\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:35 crc kubenswrapper[4711]: E0123 08:24:35.080423 4711 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.220:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:24:35 crc kubenswrapper[4711]: I0123 08:24:35.477632 4711 status_manager.go:851] "Failed to get status for pod" podUID="0a16e14b-953f-491c-9986-b5bafcf8cd0b" pod="openshift-marketplace/redhat-marketplace-qq59p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-qq59p\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:35 crc kubenswrapper[4711]: I0123 08:24:35.478177 4711 status_manager.go:851] "Failed to get status for pod" podUID="f2caef8b-b3d6-4982-a70a-576fbba7ceab" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:35 crc kubenswrapper[4711]: I0123 08:24:35.478627 4711 status_manager.go:851] "Failed to get status for pod" podUID="ac8209ab-0e14-4fed-9dcc-0978176748a1" pod="openshift-marketplace/community-operators-q5bgt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-q5bgt\": dial tcp 38.102.83.220:6443: connect: connection refused" 
Jan 23 08:24:35 crc kubenswrapper[4711]: I0123 08:24:35.478925 4711 status_manager.go:851] "Failed to get status for pod" podUID="3c8ded03-464c-4a85-8468-067607680129" pod="openshift-marketplace/redhat-operators-kqptv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-kqptv\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:35 crc kubenswrapper[4711]: I0123 08:24:35.479163 4711 status_manager.go:851] "Failed to get status for pod" podUID="303af9ef-3014-4b33-ba8e-f6b4a9227485" pod="openshift-marketplace/community-operators-shg2k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-shg2k\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:35 crc kubenswrapper[4711]: I0123 08:24:35.479429 4711 status_manager.go:851] "Failed to get status for pod" podUID="9ef23d02-cc65-4020-897b-3e114c07d801" pod="openshift-marketplace/redhat-operators-885xw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-885xw\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:36 crc kubenswrapper[4711]: E0123 08:24:36.082259 4711 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.220:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:24:36 crc kubenswrapper[4711]: E0123 08:24:36.385099 4711 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:36 crc kubenswrapper[4711]: E0123 08:24:36.386359 4711 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:36 crc kubenswrapper[4711]: E0123 08:24:36.387060 4711 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:36 crc kubenswrapper[4711]: E0123 08:24:36.387582 4711 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:36 crc kubenswrapper[4711]: E0123 08:24:36.388179 4711 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:36 crc kubenswrapper[4711]: I0123 08:24:36.388366 4711 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Jan 23 08:24:36 crc kubenswrapper[4711]: E0123 08:24:36.388936 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="200ms" Jan 23 08:24:36 crc 
kubenswrapper[4711]: E0123 08:24:36.590056 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="400ms" Jan 23 08:24:36 crc kubenswrapper[4711]: E0123 08:24:36.990980 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="800ms" Jan 23 08:24:37 crc kubenswrapper[4711]: E0123 08:24:37.791824 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="1.6s" Jan 23 08:24:38 crc kubenswrapper[4711]: I0123 08:24:38.685343 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9qd4n" Jan 23 08:24:38 crc kubenswrapper[4711]: I0123 08:24:38.685838 4711 status_manager.go:851] "Failed to get status for pod" podUID="9ef23d02-cc65-4020-897b-3e114c07d801" pod="openshift-marketplace/redhat-operators-885xw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-885xw\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:38 crc kubenswrapper[4711]: I0123 08:24:38.686256 4711 status_manager.go:851] "Failed to get status for pod" podUID="dc70bbe1-6f64-4501-b4cd-afd381a50e86" pod="openshift-marketplace/certified-operators-9qd4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-9qd4n\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:38 crc kubenswrapper[4711]: I0123 08:24:38.686665 4711 status_manager.go:851] "Failed to get status for pod" podUID="f2caef8b-b3d6-4982-a70a-576fbba7ceab" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:38 crc kubenswrapper[4711]: I0123 08:24:38.686915 4711 status_manager.go:851] "Failed to get status for pod" podUID="0a16e14b-953f-491c-9986-b5bafcf8cd0b" pod="openshift-marketplace/redhat-marketplace-qq59p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-qq59p\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:38 crc kubenswrapper[4711]: I0123 08:24:38.687242 4711 status_manager.go:851] "Failed to get status for pod" podUID="ac8209ab-0e14-4fed-9dcc-0978176748a1" pod="openshift-marketplace/community-operators-q5bgt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-q5bgt\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:38 crc kubenswrapper[4711]: I0123 08:24:38.687603 4711 status_manager.go:851] "Failed to get status for pod" podUID="3c8ded03-464c-4a85-8468-067607680129" pod="openshift-marketplace/redhat-operators-kqptv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-kqptv\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:38 crc 
kubenswrapper[4711]: I0123 08:24:38.687929 4711 status_manager.go:851] "Failed to get status for pod" podUID="303af9ef-3014-4b33-ba8e-f6b4a9227485" pod="openshift-marketplace/community-operators-shg2k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-shg2k\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:39 crc kubenswrapper[4711]: E0123 08:24:39.392732 4711 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.220:6443: connect: connection refused" interval="3.2s" Jan 23 08:24:39 crc kubenswrapper[4711]: I0123 08:24:39.473301 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:39 crc kubenswrapper[4711]: I0123 08:24:39.474053 4711 status_manager.go:851] "Failed to get status for pod" podUID="f2caef8b-b3d6-4982-a70a-576fbba7ceab" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:39 crc kubenswrapper[4711]: I0123 08:24:39.474379 4711 status_manager.go:851] "Failed to get status for pod" podUID="0a16e14b-953f-491c-9986-b5bafcf8cd0b" pod="openshift-marketplace/redhat-marketplace-qq59p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-qq59p\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:39 crc kubenswrapper[4711]: I0123 08:24:39.474831 4711 status_manager.go:851] "Failed to get status for pod" podUID="ac8209ab-0e14-4fed-9dcc-0978176748a1" pod="openshift-marketplace/community-operators-q5bgt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-q5bgt\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:39 crc kubenswrapper[4711]: I0123 08:24:39.475280 4711 status_manager.go:851] "Failed to get status for pod" podUID="3c8ded03-464c-4a85-8468-067607680129" pod="openshift-marketplace/redhat-operators-kqptv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-kqptv\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:39 crc kubenswrapper[4711]: I0123 08:24:39.475570 4711 status_manager.go:851] "Failed to get status for pod" podUID="303af9ef-3014-4b33-ba8e-f6b4a9227485" pod="openshift-marketplace/community-operators-shg2k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-shg2k\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:39 crc kubenswrapper[4711]: I0123 08:24:39.475938 4711 status_manager.go:851] "Failed to get status for pod" podUID="9ef23d02-cc65-4020-897b-3e114c07d801" pod="openshift-marketplace/redhat-operators-885xw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-885xw\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:39 crc kubenswrapper[4711]: I0123 08:24:39.476305 4711 status_manager.go:851] "Failed to get status for pod" podUID="dc70bbe1-6f64-4501-b4cd-afd381a50e86" pod="openshift-marketplace/certified-operators-9qd4n" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-9qd4n\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:39 crc kubenswrapper[4711]: I0123 08:24:39.485979 4711 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f842319a-e130-4f1e-8aeb-d92f8ad00290" Jan 23 08:24:39 crc kubenswrapper[4711]: I0123 08:24:39.486006 4711 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f842319a-e130-4f1e-8aeb-d92f8ad00290" Jan 23 08:24:39 crc kubenswrapper[4711]: E0123 08:24:39.486495 4711 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:39 crc kubenswrapper[4711]: I0123 08:24:39.487059 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:39 crc kubenswrapper[4711]: W0123 08:24:39.507831 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-6b4b109cf4317ee39e79c9d1c56187d00f545e7ef06365fe88249ba60351f805 WatchSource:0}: Error finding container 6b4b109cf4317ee39e79c9d1c56187d00f545e7ef06365fe88249ba60351f805: Status 404 returned error can't find the container with id 6b4b109cf4317ee39e79c9d1c56187d00f545e7ef06365fe88249ba60351f805 Jan 23 08:24:40 crc kubenswrapper[4711]: I0123 08:24:40.104119 4711 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="36ff37e719e40c247ba18362876b47a7535853e5ff474ca85edf6280d3b7890f" exitCode=0 Jan 23 08:24:40 crc kubenswrapper[4711]: I0123 08:24:40.104209 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"36ff37e719e40c247ba18362876b47a7535853e5ff474ca85edf6280d3b7890f"} Jan 23 08:24:40 crc kubenswrapper[4711]: I0123 08:24:40.104419 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"6b4b109cf4317ee39e79c9d1c56187d00f545e7ef06365fe88249ba60351f805"} Jan 23 08:24:40 crc kubenswrapper[4711]: I0123 08:24:40.104732 4711 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f842319a-e130-4f1e-8aeb-d92f8ad00290" Jan 23 08:24:40 crc kubenswrapper[4711]: I0123 08:24:40.104750 4711 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f842319a-e130-4f1e-8aeb-d92f8ad00290" Jan 23 08:24:40 crc kubenswrapper[4711]: I0123 08:24:40.105131 4711 status_manager.go:851] "Failed to get status for pod" podUID="3c8ded03-464c-4a85-8468-067607680129" pod="openshift-marketplace/redhat-operators-kqptv" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-kqptv\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:40 crc kubenswrapper[4711]: E0123 08:24:40.105170 4711 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:40 crc kubenswrapper[4711]: I0123 08:24:40.105398 4711 status_manager.go:851] "Failed to get status for pod" podUID="303af9ef-3014-4b33-ba8e-f6b4a9227485" pod="openshift-marketplace/community-operators-shg2k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-shg2k\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:40 crc kubenswrapper[4711]: I0123 08:24:40.105683 4711 status_manager.go:851] "Failed to get status for pod" podUID="9ef23d02-cc65-4020-897b-3e114c07d801" pod="openshift-marketplace/redhat-operators-885xw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-885xw\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:40 crc kubenswrapper[4711]: I0123 08:24:40.105941 4711 status_manager.go:851] "Failed to get status for pod" podUID="dc70bbe1-6f64-4501-b4cd-afd381a50e86" pod="openshift-marketplace/certified-operators-9qd4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-9qd4n\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:40 crc kubenswrapper[4711]: I0123 08:24:40.106245 4711 status_manager.go:851] "Failed to get status for pod" podUID="0a16e14b-953f-491c-9986-b5bafcf8cd0b" pod="openshift-marketplace/redhat-marketplace-qq59p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-qq59p\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:40 crc kubenswrapper[4711]: I0123 08:24:40.106609 4711 status_manager.go:851] "Failed to get status for pod" podUID="f2caef8b-b3d6-4982-a70a-576fbba7ceab" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:40 crc kubenswrapper[4711]: I0123 08:24:40.106848 4711 status_manager.go:851] "Failed to get status for pod" podUID="ac8209ab-0e14-4fed-9dcc-0978176748a1" pod="openshift-marketplace/community-operators-q5bgt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-q5bgt\": dial tcp 38.102.83.220:6443: connect: connection refused" Jan 23 08:24:41 crc kubenswrapper[4711]: I0123 08:24:41.112611 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"ae17b08e1c360e2e48931a338904da7b91afb15bffe62e2502f3fffa5ad54cb4"} Jan 23 08:24:41 crc kubenswrapper[4711]: I0123 08:24:41.112670 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"594f31fac3ce7fd6c9444a1fa9d8538320bd0b0c666a98226dda362a914de049"} Jan 23 08:24:41 crc kubenswrapper[4711]: I0123 08:24:41.112683 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"dc9133e7932df4ff30c62da34ef71acb9e49ce2af3d06868b918a7767139a7be"} Jan 23 08:24:42 crc 
kubenswrapper[4711]: I0123 08:24:42.121110 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 23 08:24:42 crc kubenswrapper[4711]: I0123 08:24:42.121398 4711 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d" exitCode=1 Jan 23 08:24:42 crc kubenswrapper[4711]: I0123 08:24:42.121463 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d"} Jan 23 08:24:42 crc kubenswrapper[4711]: I0123 08:24:42.121973 4711 scope.go:117] "RemoveContainer" containerID="c67c4e2b9627e7babbce048930dd4bd2a732485c1cb347ebd4f57cf86bcf0e8d" Jan 23 08:24:42 crc kubenswrapper[4711]: I0123 08:24:42.124774 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"38c53c413761f51a129263c900291a753f82b97d1a7cf31ec58aeb026c438258"} Jan 23 08:24:42 crc kubenswrapper[4711]: I0123 08:24:42.124821 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"ef0f36c9702673bc8ba9550796ce92df29c4b5efff55a1256299fca0a96bdec8"} Jan 23 08:24:42 crc kubenswrapper[4711]: I0123 08:24:42.125085 4711 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f842319a-e130-4f1e-8aeb-d92f8ad00290" Jan 23 08:24:42 crc kubenswrapper[4711]: I0123 08:24:42.125112 4711 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f842319a-e130-4f1e-8aeb-d92f8ad00290" Jan 23 08:24:42 crc kubenswrapper[4711]: I0123 08:24:42.125339 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.135309 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.135688 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"110ed8c673619a4fb6ed420898628d4c7c76582e1af426eafbdf00fd54b9a069"} Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.355330 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" podUID="be1b64cc-b8d5-429c-8189-542268f1d7a2" containerName="oauth-openshift" containerID="cri-o://388265ffbae000e3a9f89152332a84b17c6f8c33d9cadaa6b5940e02ab9e5b9f" gracePeriod=15 Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.683830 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.701557 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-serving-cert\") pod \"be1b64cc-b8d5-429c-8189-542268f1d7a2\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.701614 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-service-ca\") pod \"be1b64cc-b8d5-429c-8189-542268f1d7a2\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.701634 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-session\") pod \"be1b64cc-b8d5-429c-8189-542268f1d7a2\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.701670 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-ocp-branding-template\") pod \"be1b64cc-b8d5-429c-8189-542268f1d7a2\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.701706 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-template-provider-selection\") pod \"be1b64cc-b8d5-429c-8189-542268f1d7a2\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.701757 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzkp8\" (UniqueName: \"kubernetes.io/projected/be1b64cc-b8d5-429c-8189-542268f1d7a2-kube-api-access-tzkp8\") pod \"be1b64cc-b8d5-429c-8189-542268f1d7a2\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.701785 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-router-certs\") pod \"be1b64cc-b8d5-429c-8189-542268f1d7a2\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.702003 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-trusted-ca-bundle\") pod \"be1b64cc-b8d5-429c-8189-542268f1d7a2\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.702058 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-template-login\") pod \"be1b64cc-b8d5-429c-8189-542268f1d7a2\" (UID: 
\"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.702083 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/be1b64cc-b8d5-429c-8189-542268f1d7a2-audit-dir\") pod \"be1b64cc-b8d5-429c-8189-542268f1d7a2\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.702101 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-template-error\") pod \"be1b64cc-b8d5-429c-8189-542268f1d7a2\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.702193 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-audit-policies\") pod \"be1b64cc-b8d5-429c-8189-542268f1d7a2\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.702220 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-idp-0-file-data\") pod \"be1b64cc-b8d5-429c-8189-542268f1d7a2\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.702239 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-cliconfig\") pod \"be1b64cc-b8d5-429c-8189-542268f1d7a2\" (UID: \"be1b64cc-b8d5-429c-8189-542268f1d7a2\") " Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.702248 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/be1b64cc-b8d5-429c-8189-542268f1d7a2-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "be1b64cc-b8d5-429c-8189-542268f1d7a2" (UID: "be1b64cc-b8d5-429c-8189-542268f1d7a2"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.702499 4711 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/be1b64cc-b8d5-429c-8189-542268f1d7a2-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.702898 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "be1b64cc-b8d5-429c-8189-542268f1d7a2" (UID: "be1b64cc-b8d5-429c-8189-542268f1d7a2"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.702927 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "be1b64cc-b8d5-429c-8189-542268f1d7a2" (UID: "be1b64cc-b8d5-429c-8189-542268f1d7a2"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.703640 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "be1b64cc-b8d5-429c-8189-542268f1d7a2" (UID: "be1b64cc-b8d5-429c-8189-542268f1d7a2"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.704124 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "be1b64cc-b8d5-429c-8189-542268f1d7a2" (UID: "be1b64cc-b8d5-429c-8189-542268f1d7a2"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.708682 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be1b64cc-b8d5-429c-8189-542268f1d7a2-kube-api-access-tzkp8" (OuterVolumeSpecName: "kube-api-access-tzkp8") pod "be1b64cc-b8d5-429c-8189-542268f1d7a2" (UID: "be1b64cc-b8d5-429c-8189-542268f1d7a2"). InnerVolumeSpecName "kube-api-access-tzkp8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.708809 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "be1b64cc-b8d5-429c-8189-542268f1d7a2" (UID: "be1b64cc-b8d5-429c-8189-542268f1d7a2"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.710096 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "be1b64cc-b8d5-429c-8189-542268f1d7a2" (UID: "be1b64cc-b8d5-429c-8189-542268f1d7a2"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.710224 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "be1b64cc-b8d5-429c-8189-542268f1d7a2" (UID: "be1b64cc-b8d5-429c-8189-542268f1d7a2"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.711037 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "be1b64cc-b8d5-429c-8189-542268f1d7a2" (UID: "be1b64cc-b8d5-429c-8189-542268f1d7a2"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.711383 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "be1b64cc-b8d5-429c-8189-542268f1d7a2" (UID: "be1b64cc-b8d5-429c-8189-542268f1d7a2"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.711469 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "be1b64cc-b8d5-429c-8189-542268f1d7a2" (UID: "be1b64cc-b8d5-429c-8189-542268f1d7a2"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.711774 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "be1b64cc-b8d5-429c-8189-542268f1d7a2" (UID: "be1b64cc-b8d5-429c-8189-542268f1d7a2"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.712147 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "be1b64cc-b8d5-429c-8189-542268f1d7a2" (UID: "be1b64cc-b8d5-429c-8189-542268f1d7a2"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.803539 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.803570 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.803584 4711 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.803596 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.803606 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.803615 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.803624 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.803633 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.803643 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.803652 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.803662 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzkp8\" (UniqueName: \"kubernetes.io/projected/be1b64cc-b8d5-429c-8189-542268f1d7a2-kube-api-access-tzkp8\") on node \"crc\" DevicePath \"\"" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.803670 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 23 08:24:43 crc kubenswrapper[4711]: I0123 08:24:43.803678 4711 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/be1b64cc-b8d5-429c-8189-542268f1d7a2-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 23 08:24:44 crc kubenswrapper[4711]: I0123 08:24:44.142479 4711 generic.go:334] "Generic (PLEG): container finished" podID="be1b64cc-b8d5-429c-8189-542268f1d7a2" containerID="388265ffbae000e3a9f89152332a84b17c6f8c33d9cadaa6b5940e02ab9e5b9f" exitCode=0 Jan 23 08:24:44 crc kubenswrapper[4711]: I0123 08:24:44.142535 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" event={"ID":"be1b64cc-b8d5-429c-8189-542268f1d7a2","Type":"ContainerDied","Data":"388265ffbae000e3a9f89152332a84b17c6f8c33d9cadaa6b5940e02ab9e5b9f"} Jan 23 08:24:44 crc kubenswrapper[4711]: I0123 08:24:44.142554 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" Jan 23 08:24:44 crc kubenswrapper[4711]: I0123 08:24:44.142571 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-q5hjq" event={"ID":"be1b64cc-b8d5-429c-8189-542268f1d7a2","Type":"ContainerDied","Data":"c19aba178eeb91fd573e323012cf4609cf3fd506f4d40d7a3350fd7661c0845d"} Jan 23 08:24:44 crc kubenswrapper[4711]: I0123 08:24:44.142593 4711 scope.go:117] "RemoveContainer" containerID="388265ffbae000e3a9f89152332a84b17c6f8c33d9cadaa6b5940e02ab9e5b9f" Jan 23 08:24:44 crc kubenswrapper[4711]: I0123 08:24:44.160239 4711 scope.go:117] "RemoveContainer" containerID="388265ffbae000e3a9f89152332a84b17c6f8c33d9cadaa6b5940e02ab9e5b9f" Jan 23 08:24:44 crc kubenswrapper[4711]: E0123 08:24:44.160661 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"388265ffbae000e3a9f89152332a84b17c6f8c33d9cadaa6b5940e02ab9e5b9f\": container with ID starting with 388265ffbae000e3a9f89152332a84b17c6f8c33d9cadaa6b5940e02ab9e5b9f not found: ID does not exist" containerID="388265ffbae000e3a9f89152332a84b17c6f8c33d9cadaa6b5940e02ab9e5b9f" Jan 23 08:24:44 crc kubenswrapper[4711]: I0123 08:24:44.160700 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"388265ffbae000e3a9f89152332a84b17c6f8c33d9cadaa6b5940e02ab9e5b9f"} err="failed to get container status \"388265ffbae000e3a9f89152332a84b17c6f8c33d9cadaa6b5940e02ab9e5b9f\": rpc error: code = NotFound desc = could not find container \"388265ffbae000e3a9f89152332a84b17c6f8c33d9cadaa6b5940e02ab9e5b9f\": container with ID starting with 388265ffbae000e3a9f89152332a84b17c6f8c33d9cadaa6b5940e02ab9e5b9f not found: ID does not exist" Jan 23 08:24:44 crc kubenswrapper[4711]: I0123 08:24:44.487421 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:44 crc kubenswrapper[4711]: I0123 08:24:44.487475 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:44 crc kubenswrapper[4711]: I0123 08:24:44.492842 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:45 
crc kubenswrapper[4711]: I0123 08:24:45.726486 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 23 08:24:45 crc kubenswrapper[4711]: I0123 08:24:45.730283 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 23 08:24:46 crc kubenswrapper[4711]: I0123 08:24:46.154312 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 23 08:24:47 crc kubenswrapper[4711]: I0123 08:24:47.136710 4711 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:47 crc kubenswrapper[4711]: I0123 08:24:47.159174 4711 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f842319a-e130-4f1e-8aeb-d92f8ad00290" Jan 23 08:24:47 crc kubenswrapper[4711]: I0123 08:24:47.159217 4711 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f842319a-e130-4f1e-8aeb-d92f8ad00290" Jan 23 08:24:47 crc kubenswrapper[4711]: I0123 08:24:47.163852 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:47 crc kubenswrapper[4711]: I0123 08:24:47.357879 4711 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="c4f8bcd0-fb4e-427d-a224-15bfddcfc605" Jan 23 08:24:48 crc kubenswrapper[4711]: I0123 08:24:48.164666 4711 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f842319a-e130-4f1e-8aeb-d92f8ad00290" Jan 23 08:24:48 crc kubenswrapper[4711]: I0123 08:24:48.164968 4711 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f842319a-e130-4f1e-8aeb-d92f8ad00290" Jan 23 08:24:48 crc kubenswrapper[4711]: I0123 08:24:48.167195 4711 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="c4f8bcd0-fb4e-427d-a224-15bfddcfc605" Jan 23 08:24:53 crc kubenswrapper[4711]: I0123 08:24:53.151190 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 23 08:24:53 crc kubenswrapper[4711]: I0123 08:24:53.232188 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 23 08:24:53 crc kubenswrapper[4711]: I0123 08:24:53.406850 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 23 08:24:53 crc kubenswrapper[4711]: I0123 08:24:53.407019 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 23 08:24:54 crc kubenswrapper[4711]: I0123 08:24:54.064312 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 23 08:24:54 crc kubenswrapper[4711]: I0123 08:24:54.146958 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 23 08:24:54 
crc kubenswrapper[4711]: I0123 08:24:54.306576 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 23 08:24:54 crc kubenswrapper[4711]: I0123 08:24:54.507867 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 23 08:24:54 crc kubenswrapper[4711]: I0123 08:24:54.652563 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 23 08:24:54 crc kubenswrapper[4711]: I0123 08:24:54.688686 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 23 08:24:54 crc kubenswrapper[4711]: I0123 08:24:54.718573 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 23 08:24:54 crc kubenswrapper[4711]: I0123 08:24:54.845033 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 23 08:24:55 crc kubenswrapper[4711]: I0123 08:24:55.126279 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 23 08:24:55 crc kubenswrapper[4711]: I0123 08:24:55.180686 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 23 08:24:55 crc kubenswrapper[4711]: I0123 08:24:55.202625 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 23 08:24:55 crc kubenswrapper[4711]: I0123 08:24:55.467999 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 23 08:24:55 crc kubenswrapper[4711]: I0123 08:24:55.506476 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 23 08:24:55 crc kubenswrapper[4711]: I0123 08:24:55.750326 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 23 08:24:55 crc kubenswrapper[4711]: I0123 08:24:55.784820 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 23 08:24:55 crc kubenswrapper[4711]: I0123 08:24:55.824593 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 23 08:24:55 crc kubenswrapper[4711]: I0123 08:24:55.937061 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 23 08:24:56 crc kubenswrapper[4711]: I0123 08:24:56.024081 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 23 08:24:56 crc kubenswrapper[4711]: I0123 08:24:56.477560 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 23 08:24:57 crc kubenswrapper[4711]: I0123 08:24:56.704009 4711 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 23 08:24:57 crc kubenswrapper[4711]: I0123 08:24:56.708214 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-q5hjq","openshift-kube-apiserver/kube-apiserver-crc"] Jan 23 08:24:57 crc kubenswrapper[4711]: I0123 
08:24:56.708271 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 23 08:24:57 crc kubenswrapper[4711]: I0123 08:24:56.712039 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 23 08:24:57 crc kubenswrapper[4711]: I0123 08:24:56.712345 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 23 08:24:57 crc kubenswrapper[4711]: I0123 08:24:56.737358 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=9.737338113 podStartE2EDuration="9.737338113s" podCreationTimestamp="2026-01-23 08:24:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:24:56.734919704 +0000 UTC m=+282.307876072" watchObservedRunningTime="2026-01-23 08:24:56.737338113 +0000 UTC m=+282.310294491" Jan 23 08:24:57 crc kubenswrapper[4711]: I0123 08:24:56.750684 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 23 08:24:57 crc kubenswrapper[4711]: I0123 08:24:56.807170 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 23 08:24:57 crc kubenswrapper[4711]: I0123 08:24:57.182052 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 23 08:24:57 crc kubenswrapper[4711]: I0123 08:24:57.207484 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 23 08:24:57 crc kubenswrapper[4711]: I0123 08:24:57.285772 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 23 08:24:57 crc kubenswrapper[4711]: I0123 08:24:57.343128 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 23 08:24:57 crc kubenswrapper[4711]: I0123 08:24:57.377449 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 23 08:24:57 crc kubenswrapper[4711]: I0123 08:24:57.389326 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 23 08:24:57 crc kubenswrapper[4711]: I0123 08:24:57.481991 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be1b64cc-b8d5-429c-8189-542268f1d7a2" path="/var/lib/kubelet/pods/be1b64cc-b8d5-429c-8189-542268f1d7a2/volumes" Jan 23 08:24:57 crc kubenswrapper[4711]: I0123 08:24:57.489949 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 23 08:24:57 crc kubenswrapper[4711]: I0123 08:24:57.614659 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 23 08:24:57 crc kubenswrapper[4711]: I0123 08:24:57.714134 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 23 08:24:57 crc kubenswrapper[4711]: I0123 08:24:57.842140 4711 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 23 08:24:58 crc kubenswrapper[4711]: I0123 08:24:58.089998 4711 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 23 08:24:58 crc kubenswrapper[4711]: I0123 08:24:58.090559 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://32f33ea11baca093aa2bc580070509d1e686b817bf475debed793dacd53e886b" gracePeriod=5 Jan 23 08:24:58 crc kubenswrapper[4711]: I0123 08:24:58.122058 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 23 08:24:58 crc kubenswrapper[4711]: I0123 08:24:58.172739 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 23 08:24:58 crc kubenswrapper[4711]: I0123 08:24:58.303142 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 23 08:24:58 crc kubenswrapper[4711]: I0123 08:24:58.327433 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 23 08:24:58 crc kubenswrapper[4711]: I0123 08:24:58.369320 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 23 08:24:58 crc kubenswrapper[4711]: I0123 08:24:58.491379 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 23 08:24:58 crc kubenswrapper[4711]: I0123 08:24:58.798087 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 23 08:24:58 crc kubenswrapper[4711]: I0123 08:24:58.853913 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 23 08:24:59 crc kubenswrapper[4711]: I0123 08:24:59.027657 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 23 08:24:59 crc kubenswrapper[4711]: I0123 08:24:59.092067 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 23 08:24:59 crc kubenswrapper[4711]: I0123 08:24:59.092739 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 23 08:24:59 crc kubenswrapper[4711]: I0123 08:24:59.176012 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 23 08:24:59 crc kubenswrapper[4711]: I0123 08:24:59.364270 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 23 08:24:59 crc kubenswrapper[4711]: I0123 08:24:59.425088 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 23 08:24:59 crc kubenswrapper[4711]: I0123 08:24:59.554457 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 23 08:24:59 crc kubenswrapper[4711]: I0123 08:24:59.784987 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 23 
08:24:59 crc kubenswrapper[4711]: I0123 08:24:59.836608 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 23 08:25:00 crc kubenswrapper[4711]: I0123 08:25:00.082482 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 23 08:25:00 crc kubenswrapper[4711]: I0123 08:25:00.156743 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 23 08:25:00 crc kubenswrapper[4711]: I0123 08:25:00.317204 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 23 08:25:00 crc kubenswrapper[4711]: I0123 08:25:00.709305 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 23 08:25:00 crc kubenswrapper[4711]: I0123 08:25:00.737661 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 23 08:25:00 crc kubenswrapper[4711]: I0123 08:25:00.858485 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 23 08:25:00 crc kubenswrapper[4711]: I0123 08:25:00.913679 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 23 08:25:00 crc kubenswrapper[4711]: I0123 08:25:00.948128 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 23 08:25:00 crc kubenswrapper[4711]: I0123 08:25:00.994715 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 23 08:25:01 crc kubenswrapper[4711]: I0123 08:25:01.261721 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 23 08:25:01 crc kubenswrapper[4711]: I0123 08:25:01.564941 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 23 08:25:01 crc kubenswrapper[4711]: I0123 08:25:01.583746 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 23 08:25:01 crc kubenswrapper[4711]: I0123 08:25:01.701407 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 23 08:25:02 crc kubenswrapper[4711]: I0123 08:25:02.211037 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 23 08:25:02 crc kubenswrapper[4711]: I0123 08:25:02.606684 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 23 08:25:02 crc kubenswrapper[4711]: I0123 08:25:02.623051 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 23 08:25:02 crc kubenswrapper[4711]: I0123 08:25:02.981856 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 23 08:25:03 crc kubenswrapper[4711]: I0123 08:25:03.020566 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 23 08:25:03 crc kubenswrapper[4711]: I0123 08:25:03.114786 4711 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:03.159406 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:03.179101 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:03.272611 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:03.337401 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:03.429596 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:03.642758 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:03.696932 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:03.798557 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:03.807659 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.121240 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.243201 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.257282 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.257336 4711 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="32f33ea11baca093aa2bc580070509d1e686b817bf475debed793dacd53e886b" exitCode=137 Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.321826 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.436534 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.436608 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.458795 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.458908 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.458936 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.458956 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.458969 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.459028 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.459031 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.459061 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.459090 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.459308 4711 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.459320 4711 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.459330 4711 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.459338 4711 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.466026 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.466440 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.472870 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.482970 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.519071 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.554361 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.560147 4711 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.562153 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.581647 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.664436 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.700256 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.713275 4711 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.760102 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.810849 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.844323 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.874522 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.903703 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 23 08:25:04 crc kubenswrapper[4711]: I0123 08:25:04.938880 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 23 08:25:05 crc kubenswrapper[4711]: I0123 08:25:05.082748 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 23 08:25:05 crc kubenswrapper[4711]: I0123 08:25:05.153017 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 23 08:25:05 crc kubenswrapper[4711]: I0123 08:25:05.203179 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 23 08:25:05 crc kubenswrapper[4711]: I0123 08:25:05.221836 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 23 08:25:05 crc kubenswrapper[4711]: I0123 08:25:05.263035 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 23 08:25:05 crc kubenswrapper[4711]: I0123 08:25:05.263102 4711 scope.go:117] "RemoveContainer" containerID="32f33ea11baca093aa2bc580070509d1e686b817bf475debed793dacd53e886b" Jan 23 08:25:05 crc kubenswrapper[4711]: I0123 08:25:05.263169 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 23 08:25:05 crc kubenswrapper[4711]: I0123 08:25:05.377983 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 23 08:25:05 crc kubenswrapper[4711]: I0123 08:25:05.425827 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 23 08:25:05 crc kubenswrapper[4711]: I0123 08:25:05.452478 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 23 08:25:05 crc kubenswrapper[4711]: I0123 08:25:05.460718 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 23 08:25:05 crc kubenswrapper[4711]: I0123 08:25:05.481358 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Jan 23 08:25:05 crc kubenswrapper[4711]: I0123 08:25:05.494573 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 23 08:25:05 crc kubenswrapper[4711]: I0123 08:25:05.506760 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 23 08:25:05 crc kubenswrapper[4711]: I0123 08:25:05.506914 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 23 08:25:05 crc kubenswrapper[4711]: I0123 08:25:05.531671 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 23 08:25:05 crc kubenswrapper[4711]: I0123 08:25:05.709841 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 23 08:25:05 crc kubenswrapper[4711]: I0123 08:25:05.742903 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 23 08:25:05 crc kubenswrapper[4711]: I0123 08:25:05.774820 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 23 08:25:05 crc kubenswrapper[4711]: I0123 08:25:05.886019 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 23 08:25:05 crc kubenswrapper[4711]: I0123 08:25:05.971782 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 23 08:25:06 crc kubenswrapper[4711]: I0123 08:25:06.002398 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 23 08:25:06 crc kubenswrapper[4711]: I0123 08:25:06.015497 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 23 08:25:06 crc kubenswrapper[4711]: I0123 08:25:06.089401 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 23 08:25:06 crc kubenswrapper[4711]: I0123 08:25:06.291265 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 23 08:25:06 crc kubenswrapper[4711]: I0123 08:25:06.352590 4711 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 23 08:25:06 crc kubenswrapper[4711]: I0123 08:25:06.421646 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 23 08:25:06 crc kubenswrapper[4711]: I0123 08:25:06.422084 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 23 08:25:06 crc kubenswrapper[4711]: I0123 08:25:06.429660 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 23 08:25:06 crc kubenswrapper[4711]: I0123 08:25:06.526900 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 23 08:25:06 crc kubenswrapper[4711]: I0123 08:25:06.545110 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 23 08:25:06 crc kubenswrapper[4711]: I0123 08:25:06.561223 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 23 08:25:06 crc kubenswrapper[4711]: I0123 08:25:06.601922 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 23 08:25:06 crc kubenswrapper[4711]: I0123 08:25:06.612187 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 23 08:25:06 crc kubenswrapper[4711]: I0123 08:25:06.742458 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 23 08:25:06 crc kubenswrapper[4711]: I0123 08:25:06.881784 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 23 08:25:06 crc kubenswrapper[4711]: I0123 08:25:06.888912 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 23 08:25:06 crc kubenswrapper[4711]: I0123 08:25:06.889733 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 23 08:25:06 crc kubenswrapper[4711]: I0123 08:25:06.972254 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 23 08:25:06 crc kubenswrapper[4711]: I0123 08:25:06.972254 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 23 08:25:07 crc kubenswrapper[4711]: I0123 08:25:07.136109 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 23 08:25:07 crc kubenswrapper[4711]: I0123 08:25:07.245595 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 23 08:25:07 crc kubenswrapper[4711]: I0123 08:25:07.257396 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 23 08:25:07 crc kubenswrapper[4711]: I0123 08:25:07.290118 4711 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-console"/"console-config" Jan 23 08:25:07 crc kubenswrapper[4711]: I0123 08:25:07.643592 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 23 08:25:07 crc kubenswrapper[4711]: I0123 08:25:07.699813 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 23 08:25:07 crc kubenswrapper[4711]: I0123 08:25:07.939307 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 23 08:25:07 crc kubenswrapper[4711]: I0123 08:25:07.991256 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 23 08:25:08 crc kubenswrapper[4711]: I0123 08:25:08.059642 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 23 08:25:08 crc kubenswrapper[4711]: I0123 08:25:08.072304 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 23 08:25:08 crc kubenswrapper[4711]: I0123 08:25:08.176297 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 23 08:25:08 crc kubenswrapper[4711]: I0123 08:25:08.187121 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 23 08:25:08 crc kubenswrapper[4711]: I0123 08:25:08.392843 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 23 08:25:08 crc kubenswrapper[4711]: I0123 08:25:08.496227 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 23 08:25:08 crc kubenswrapper[4711]: I0123 08:25:08.510685 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 23 08:25:08 crc kubenswrapper[4711]: I0123 08:25:08.576789 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 23 08:25:08 crc kubenswrapper[4711]: I0123 08:25:08.612997 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 23 08:25:08 crc kubenswrapper[4711]: I0123 08:25:08.724088 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 23 08:25:08 crc kubenswrapper[4711]: I0123 08:25:08.830623 4711 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 23 08:25:08 crc kubenswrapper[4711]: I0123 08:25:08.850939 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 23 08:25:08 crc kubenswrapper[4711]: I0123 08:25:08.906462 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 23 08:25:08 crc kubenswrapper[4711]: I0123 08:25:08.967451 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 23 08:25:08 crc kubenswrapper[4711]: I0123 08:25:08.984459 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 23 08:25:09 crc kubenswrapper[4711]: I0123 
08:25:09.124435 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 23 08:25:09 crc kubenswrapper[4711]: I0123 08:25:09.178748 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 23 08:25:09 crc kubenswrapper[4711]: I0123 08:25:09.203568 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 23 08:25:09 crc kubenswrapper[4711]: I0123 08:25:09.321440 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 23 08:25:09 crc kubenswrapper[4711]: I0123 08:25:09.441156 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 23 08:25:09 crc kubenswrapper[4711]: I0123 08:25:09.460742 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 23 08:25:09 crc kubenswrapper[4711]: I0123 08:25:09.510454 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 23 08:25:09 crc kubenswrapper[4711]: I0123 08:25:09.611544 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 23 08:25:09 crc kubenswrapper[4711]: I0123 08:25:09.705162 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 23 08:25:09 crc kubenswrapper[4711]: I0123 08:25:09.804659 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 23 08:25:09 crc kubenswrapper[4711]: I0123 08:25:09.841034 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 23 08:25:10 crc kubenswrapper[4711]: I0123 08:25:10.015053 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 23 08:25:10 crc kubenswrapper[4711]: I0123 08:25:10.086228 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 23 08:25:10 crc kubenswrapper[4711]: I0123 08:25:10.188287 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 23 08:25:10 crc kubenswrapper[4711]: I0123 08:25:10.376072 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 23 08:25:10 crc kubenswrapper[4711]: I0123 08:25:10.378652 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 23 08:25:10 crc kubenswrapper[4711]: I0123 08:25:10.389040 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 23 08:25:10 crc kubenswrapper[4711]: I0123 08:25:10.490524 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 23 08:25:10 crc kubenswrapper[4711]: I0123 08:25:10.514956 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 23 08:25:10 crc kubenswrapper[4711]: I0123 08:25:10.586732 
4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 23 08:25:10 crc kubenswrapper[4711]: I0123 08:25:10.595312 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 23 08:25:10 crc kubenswrapper[4711]: I0123 08:25:10.631437 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 23 08:25:10 crc kubenswrapper[4711]: I0123 08:25:10.722110 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 23 08:25:10 crc kubenswrapper[4711]: I0123 08:25:10.781207 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 23 08:25:10 crc kubenswrapper[4711]: I0123 08:25:10.821846 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 23 08:25:10 crc kubenswrapper[4711]: I0123 08:25:10.878783 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 23 08:25:10 crc kubenswrapper[4711]: I0123 08:25:10.942151 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 23 08:25:11 crc kubenswrapper[4711]: I0123 08:25:11.064889 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 23 08:25:11 crc kubenswrapper[4711]: I0123 08:25:11.095082 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 23 08:25:11 crc kubenswrapper[4711]: I0123 08:25:11.112964 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 23 08:25:11 crc kubenswrapper[4711]: I0123 08:25:11.274798 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 23 08:25:11 crc kubenswrapper[4711]: I0123 08:25:11.347922 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 23 08:25:11 crc kubenswrapper[4711]: I0123 08:25:11.353186 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 23 08:25:11 crc kubenswrapper[4711]: I0123 08:25:11.402769 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 23 08:25:11 crc kubenswrapper[4711]: I0123 08:25:11.517344 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 23 08:25:11 crc kubenswrapper[4711]: I0123 08:25:11.522325 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 23 08:25:11 crc kubenswrapper[4711]: I0123 08:25:11.576059 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 23 08:25:11 crc kubenswrapper[4711]: I0123 08:25:11.619005 4711 reflector.go:368] Caches populated for *v1.Secret 
from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 23 08:25:11 crc kubenswrapper[4711]: I0123 08:25:11.639226 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 23 08:25:11 crc kubenswrapper[4711]: I0123 08:25:11.712201 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 23 08:25:11 crc kubenswrapper[4711]: I0123 08:25:11.739780 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 23 08:25:11 crc kubenswrapper[4711]: I0123 08:25:11.750192 4711 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 23 08:25:11 crc kubenswrapper[4711]: I0123 08:25:11.801499 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 23 08:25:11 crc kubenswrapper[4711]: I0123 08:25:11.816073 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 23 08:25:12 crc kubenswrapper[4711]: I0123 08:25:12.013717 4711 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 23 08:25:12 crc kubenswrapper[4711]: I0123 08:25:12.132636 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 23 08:25:12 crc kubenswrapper[4711]: I0123 08:25:12.246343 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 23 08:25:12 crc kubenswrapper[4711]: I0123 08:25:12.259427 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 23 08:25:12 crc kubenswrapper[4711]: I0123 08:25:12.369975 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 23 08:25:12 crc kubenswrapper[4711]: I0123 08:25:12.396149 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 23 08:25:12 crc kubenswrapper[4711]: I0123 08:25:12.482728 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 23 08:25:12 crc kubenswrapper[4711]: I0123 08:25:12.499675 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 23 08:25:12 crc kubenswrapper[4711]: I0123 08:25:12.561221 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 23 08:25:12 crc kubenswrapper[4711]: I0123 08:25:12.561945 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 23 08:25:12 crc kubenswrapper[4711]: I0123 08:25:12.670980 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 23 08:25:12 crc kubenswrapper[4711]: I0123 08:25:12.689927 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 23 08:25:12 crc kubenswrapper[4711]: I0123 08:25:12.765810 4711 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 23 08:25:12 crc kubenswrapper[4711]: I0123 08:25:12.838914 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 23 08:25:12 crc kubenswrapper[4711]: I0123 08:25:12.957869 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.034691 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.085617 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.162818 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.171633 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.555910 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.786922 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-747bd66b49-l44gt"] Jan 23 08:25:13 crc kubenswrapper[4711]: E0123 08:25:13.787222 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2caef8b-b3d6-4982-a70a-576fbba7ceab" containerName="installer" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.787243 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2caef8b-b3d6-4982-a70a-576fbba7ceab" containerName="installer" Jan 23 08:25:13 crc kubenswrapper[4711]: E0123 08:25:13.787253 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.787260 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 23 08:25:13 crc kubenswrapper[4711]: E0123 08:25:13.787276 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be1b64cc-b8d5-429c-8189-542268f1d7a2" containerName="oauth-openshift" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.787283 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="be1b64cc-b8d5-429c-8189-542268f1d7a2" containerName="oauth-openshift" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.787416 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="be1b64cc-b8d5-429c-8189-542268f1d7a2" containerName="oauth-openshift" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.787431 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2caef8b-b3d6-4982-a70a-576fbba7ceab" containerName="installer" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.787442 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.787903 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.791358 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.791550 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.792593 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.792632 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.792693 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.792863 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.792884 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.792899 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.793299 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.794476 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.795093 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.795411 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.800382 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-747bd66b49-l44gt"] Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.807591 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.814139 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.821978 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.835415 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.843213 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.982464 4711 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.982532 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmc8l\" (UniqueName: \"kubernetes.io/projected/36a39753-4c97-483c-990d-50f6f9718ea2-kube-api-access-dmc8l\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.982565 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.982981 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-system-router-certs\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.983064 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/36a39753-4c97-483c-990d-50f6f9718ea2-audit-policies\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.983130 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.983150 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-user-template-error\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.983199 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/36a39753-4c97-483c-990d-50f6f9718ea2-audit-dir\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " 
pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.983233 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-system-service-ca\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.983254 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-user-template-login\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.983288 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.983343 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.983361 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-system-session\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:13 crc kubenswrapper[4711]: I0123 08:25:13.983405 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.039403 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.075434 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.083977 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: 
\"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.084047 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.084072 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmc8l\" (UniqueName: \"kubernetes.io/projected/36a39753-4c97-483c-990d-50f6f9718ea2-kube-api-access-dmc8l\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.084104 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.084131 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-system-router-certs\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.084191 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/36a39753-4c97-483c-990d-50f6f9718ea2-audit-policies\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.084219 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.084249 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-user-template-error\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.084277 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/36a39753-4c97-483c-990d-50f6f9718ea2-audit-dir\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " 
pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.084304 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-system-service-ca\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.084325 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-user-template-login\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.084358 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.084398 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.084418 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-system-session\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.084546 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/36a39753-4c97-483c-990d-50f6f9718ea2-audit-dir\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.085166 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/36a39753-4c97-483c-990d-50f6f9718ea2-audit-policies\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.085180 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 
08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.085210 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.085180 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-system-service-ca\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.090208 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.090282 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-system-session\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.090501 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-system-router-certs\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.090645 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-user-template-error\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.091893 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.092570 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-user-template-login\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.093165 4711 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.102492 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/36a39753-4c97-483c-990d-50f6f9718ea2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.103726 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmc8l\" (UniqueName: \"kubernetes.io/projected/36a39753-4c97-483c-990d-50f6f9718ea2-kube-api-access-dmc8l\") pod \"oauth-openshift-747bd66b49-l44gt\" (UID: \"36a39753-4c97-483c-990d-50f6f9718ea2\") " pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.106806 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.233317 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.508803 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-747bd66b49-l44gt"] Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.511694 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.570781 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.686034 4711 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.753974 4711 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Jan 23 08:25:14 crc kubenswrapper[4711]: I0123 08:25:14.908338 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 23 08:25:15 crc kubenswrapper[4711]: I0123 08:25:15.316908 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" event={"ID":"36a39753-4c97-483c-990d-50f6f9718ea2","Type":"ContainerStarted","Data":"b50778965d135a0c4ef68d956a10e3c87f212786a910bd81f29c23cb0d56193c"} Jan 23 08:25:15 crc kubenswrapper[4711]: I0123 08:25:15.316950 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" event={"ID":"36a39753-4c97-483c-990d-50f6f9718ea2","Type":"ContainerStarted","Data":"2841ba72bd0a68a74591b8ff25c842a430712bfb65b84b3e07c7ae507c0b2815"} Jan 23 08:25:15 crc kubenswrapper[4711]: I0123 08:25:15.317139 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:15 crc kubenswrapper[4711]: I0123 08:25:15.321846 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" Jan 23 08:25:15 crc kubenswrapper[4711]: I0123 08:25:15.336666 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-747bd66b49-l44gt" podStartSLOduration=57.336652524 podStartE2EDuration="57.336652524s" podCreationTimestamp="2026-01-23 08:24:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:25:15.336318066 +0000 UTC m=+300.909274434" watchObservedRunningTime="2026-01-23 08:25:15.336652524 +0000 UTC m=+300.909608882" Jan 23 08:25:15 crc kubenswrapper[4711]: I0123 08:25:15.489310 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 23 08:25:15 crc kubenswrapper[4711]: I0123 08:25:15.562675 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 23 08:25:15 crc kubenswrapper[4711]: I0123 08:25:15.722471 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 23 08:25:15 crc kubenswrapper[4711]: I0123 08:25:15.791782 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 23 08:25:15 crc kubenswrapper[4711]: I0123 08:25:15.899983 4711 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.880776 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9qd4n"] Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.881714 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9qd4n" podUID="dc70bbe1-6f64-4501-b4cd-afd381a50e86" containerName="registry-server" containerID="cri-o://da240b8885ee249428d29acbcca2a09d0ef6adeb5bedbf84c08e6707aecb91d1" gracePeriod=30 Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.885329 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q5bgt"] Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.885635 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-q5bgt" podUID="ac8209ab-0e14-4fed-9dcc-0978176748a1" containerName="registry-server" containerID="cri-o://d201da90ae40402dfe94d1564ffb7b432cfed3f65f708eb302d38104df07574a" gracePeriod=30 Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.895262 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-shg2k"] Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.895590 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-shg2k" podUID="303af9ef-3014-4b33-ba8e-f6b4a9227485" containerName="registry-server" containerID="cri-o://6bf029d6e69febaae87ae3a440c7a5630dd2120cfbd6f99db0fe2f896ee02e4b" gracePeriod=30 Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.898236 4711 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rhrsx"] Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.898437 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx" podUID="8b4e79a6-ff8b-4293-931b-bde9f25b7576" containerName="marketplace-operator" containerID="cri-o://f747b0088ff8904abc130442a0b31eb0267e13f212d15cd048f15a78b8251ff2" gracePeriod=30 Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.911732 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7xzhz"] Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.911993 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7xzhz" podUID="8f17fdf7-38ee-4c60-a0e9-e293cdd77830" containerName="registry-server" containerID="cri-o://8ff61d4563602564033e609655da6c2bdeef004c75029401ac22e52ede6f9268" gracePeriod=30 Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.917761 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qq59p"] Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.917997 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qq59p" podUID="0a16e14b-953f-491c-9986-b5bafcf8cd0b" containerName="registry-server" containerID="cri-o://aafaec601ef4340da071c13ae3b0d6eb58e8c9a27b55af2de7dcf0248244fccf" gracePeriod=30 Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.938118 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hzv6z"] Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.938853 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hzv6z" Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.954480 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-885xw"] Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.954715 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-885xw" podUID="9ef23d02-cc65-4020-897b-3e114c07d801" containerName="registry-server" containerID="cri-o://7ed25de54c7ac3f30e02bac19b14adfa13f4b548b199038259df1e4936d6aec3" gracePeriod=30 Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.961066 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kqptv"] Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.961331 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-kqptv" podUID="3c8ded03-464c-4a85-8468-067607680129" containerName="registry-server" containerID="cri-o://dbe9cb549bee3b6070e10ca9a47cc03f0babb2de19acd8068614105f6fa7154a" gracePeriod=30 Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.964959 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hzv6z"] Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.968619 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sprrs\" (UniqueName: \"kubernetes.io/projected/a9f6b0c2-4bc3-4bf5-987e-93b1f6ea0520-kube-api-access-sprrs\") pod \"marketplace-operator-79b997595-hzv6z\" (UID: \"a9f6b0c2-4bc3-4bf5-987e-93b1f6ea0520\") " pod="openshift-marketplace/marketplace-operator-79b997595-hzv6z" Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.968656 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a9f6b0c2-4bc3-4bf5-987e-93b1f6ea0520-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hzv6z\" (UID: \"a9f6b0c2-4bc3-4bf5-987e-93b1f6ea0520\") " pod="openshift-marketplace/marketplace-operator-79b997595-hzv6z" Jan 23 08:25:27 crc kubenswrapper[4711]: I0123 08:25:27.968711 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a9f6b0c2-4bc3-4bf5-987e-93b1f6ea0520-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hzv6z\" (UID: \"a9f6b0c2-4bc3-4bf5-987e-93b1f6ea0520\") " pod="openshift-marketplace/marketplace-operator-79b997595-hzv6z" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.070292 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sprrs\" (UniqueName: \"kubernetes.io/projected/a9f6b0c2-4bc3-4bf5-987e-93b1f6ea0520-kube-api-access-sprrs\") pod \"marketplace-operator-79b997595-hzv6z\" (UID: \"a9f6b0c2-4bc3-4bf5-987e-93b1f6ea0520\") " pod="openshift-marketplace/marketplace-operator-79b997595-hzv6z" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.070335 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a9f6b0c2-4bc3-4bf5-987e-93b1f6ea0520-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hzv6z\" (UID: \"a9f6b0c2-4bc3-4bf5-987e-93b1f6ea0520\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-hzv6z" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.070391 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a9f6b0c2-4bc3-4bf5-987e-93b1f6ea0520-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hzv6z\" (UID: \"a9f6b0c2-4bc3-4bf5-987e-93b1f6ea0520\") " pod="openshift-marketplace/marketplace-operator-79b997595-hzv6z" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.071891 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a9f6b0c2-4bc3-4bf5-987e-93b1f6ea0520-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hzv6z\" (UID: \"a9f6b0c2-4bc3-4bf5-987e-93b1f6ea0520\") " pod="openshift-marketplace/marketplace-operator-79b997595-hzv6z" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.079940 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a9f6b0c2-4bc3-4bf5-987e-93b1f6ea0520-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hzv6z\" (UID: \"a9f6b0c2-4bc3-4bf5-987e-93b1f6ea0520\") " pod="openshift-marketplace/marketplace-operator-79b997595-hzv6z" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.093328 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sprrs\" (UniqueName: \"kubernetes.io/projected/a9f6b0c2-4bc3-4bf5-987e-93b1f6ea0520-kube-api-access-sprrs\") pod \"marketplace-operator-79b997595-hzv6z\" (UID: \"a9f6b0c2-4bc3-4bf5-987e-93b1f6ea0520\") " pod="openshift-marketplace/marketplace-operator-79b997595-hzv6z" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.254942 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hzv6z" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.400036 4711 generic.go:334] "Generic (PLEG): container finished" podID="0a16e14b-953f-491c-9986-b5bafcf8cd0b" containerID="aafaec601ef4340da071c13ae3b0d6eb58e8c9a27b55af2de7dcf0248244fccf" exitCode=0 Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.400127 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qq59p" event={"ID":"0a16e14b-953f-491c-9986-b5bafcf8cd0b","Type":"ContainerDied","Data":"aafaec601ef4340da071c13ae3b0d6eb58e8c9a27b55af2de7dcf0248244fccf"} Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.400178 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qq59p" event={"ID":"0a16e14b-953f-491c-9986-b5bafcf8cd0b","Type":"ContainerDied","Data":"7dfb5fe2248f72147abc0bfc212fa50177980e11e221e829de5c39e0d2e68522"} Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.400191 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7dfb5fe2248f72147abc0bfc212fa50177980e11e221e829de5c39e0d2e68522" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.402546 4711 generic.go:334] "Generic (PLEG): container finished" podID="ac8209ab-0e14-4fed-9dcc-0978176748a1" containerID="d201da90ae40402dfe94d1564ffb7b432cfed3f65f708eb302d38104df07574a" exitCode=0 Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.402621 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q5bgt" event={"ID":"ac8209ab-0e14-4fed-9dcc-0978176748a1","Type":"ContainerDied","Data":"d201da90ae40402dfe94d1564ffb7b432cfed3f65f708eb302d38104df07574a"} Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.402649 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q5bgt" event={"ID":"ac8209ab-0e14-4fed-9dcc-0978176748a1","Type":"ContainerDied","Data":"3d220ab63a656417a68dc9ba2c2057e53fb3bdd2f9db8350e127fafcf5ce30a7"} Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.402663 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d220ab63a656417a68dc9ba2c2057e53fb3bdd2f9db8350e127fafcf5ce30a7" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.403854 4711 generic.go:334] "Generic (PLEG): container finished" podID="8b4e79a6-ff8b-4293-931b-bde9f25b7576" containerID="f747b0088ff8904abc130442a0b31eb0267e13f212d15cd048f15a78b8251ff2" exitCode=0 Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.403918 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx" event={"ID":"8b4e79a6-ff8b-4293-931b-bde9f25b7576","Type":"ContainerDied","Data":"f747b0088ff8904abc130442a0b31eb0267e13f212d15cd048f15a78b8251ff2"} Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.403945 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx" event={"ID":"8b4e79a6-ff8b-4293-931b-bde9f25b7576","Type":"ContainerDied","Data":"ffa90f8803ff0f5a8c74c6101f11246783a21fb7d64001c90dfa91a3b71a7834"} Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.403957 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ffa90f8803ff0f5a8c74c6101f11246783a21fb7d64001c90dfa91a3b71a7834" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.406369 4711 generic.go:334] "Generic 
(PLEG): container finished" podID="8f17fdf7-38ee-4c60-a0e9-e293cdd77830" containerID="8ff61d4563602564033e609655da6c2bdeef004c75029401ac22e52ede6f9268" exitCode=0 Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.406418 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7xzhz" event={"ID":"8f17fdf7-38ee-4c60-a0e9-e293cdd77830","Type":"ContainerDied","Data":"8ff61d4563602564033e609655da6c2bdeef004c75029401ac22e52ede6f9268"} Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.406436 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7xzhz" event={"ID":"8f17fdf7-38ee-4c60-a0e9-e293cdd77830","Type":"ContainerDied","Data":"f7b57397b7b3f07d8b20725952f7ecc2c8bd247f6c8ee9a775da9ac9c6838ae8"} Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.406447 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7b57397b7b3f07d8b20725952f7ecc2c8bd247f6c8ee9a775da9ac9c6838ae8" Jan 23 08:25:28 crc kubenswrapper[4711]: E0123 08:25:28.410955 4711 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8f17fdf7_38ee_4c60_a0e9_e293cdd77830.slice/crio-8ff61d4563602564033e609655da6c2bdeef004c75029401ac22e52ede6f9268.scope\": RecentStats: unable to find data in memory cache]" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.417013 4711 generic.go:334] "Generic (PLEG): container finished" podID="3c8ded03-464c-4a85-8468-067607680129" containerID="dbe9cb549bee3b6070e10ca9a47cc03f0babb2de19acd8068614105f6fa7154a" exitCode=0 Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.417087 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kqptv" event={"ID":"3c8ded03-464c-4a85-8468-067607680129","Type":"ContainerDied","Data":"dbe9cb549bee3b6070e10ca9a47cc03f0babb2de19acd8068614105f6fa7154a"} Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.422007 4711 generic.go:334] "Generic (PLEG): container finished" podID="9ef23d02-cc65-4020-897b-3e114c07d801" containerID="7ed25de54c7ac3f30e02bac19b14adfa13f4b548b199038259df1e4936d6aec3" exitCode=0 Jan 23 08:25:28 crc kubenswrapper[4711]: E0123 08:25:28.422019 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6bf029d6e69febaae87ae3a440c7a5630dd2120cfbd6f99db0fe2f896ee02e4b is running failed: container process not found" containerID="6bf029d6e69febaae87ae3a440c7a5630dd2120cfbd6f99db0fe2f896ee02e4b" cmd=["grpc_health_probe","-addr=:50051"] Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.422095 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-885xw" event={"ID":"9ef23d02-cc65-4020-897b-3e114c07d801","Type":"ContainerDied","Data":"7ed25de54c7ac3f30e02bac19b14adfa13f4b548b199038259df1e4936d6aec3"} Jan 23 08:25:28 crc kubenswrapper[4711]: E0123 08:25:28.422764 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6bf029d6e69febaae87ae3a440c7a5630dd2120cfbd6f99db0fe2f896ee02e4b is running failed: container process not found" containerID="6bf029d6e69febaae87ae3a440c7a5630dd2120cfbd6f99db0fe2f896ee02e4b" cmd=["grpc_health_probe","-addr=:50051"] Jan 23 08:25:28 crc kubenswrapper[4711]: E0123 08:25:28.423000 4711 
log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6bf029d6e69febaae87ae3a440c7a5630dd2120cfbd6f99db0fe2f896ee02e4b is running failed: container process not found" containerID="6bf029d6e69febaae87ae3a440c7a5630dd2120cfbd6f99db0fe2f896ee02e4b" cmd=["grpc_health_probe","-addr=:50051"] Jan 23 08:25:28 crc kubenswrapper[4711]: E0123 08:25:28.423050 4711 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6bf029d6e69febaae87ae3a440c7a5630dd2120cfbd6f99db0fe2f896ee02e4b is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-shg2k" podUID="303af9ef-3014-4b33-ba8e-f6b4a9227485" containerName="registry-server" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.424453 4711 generic.go:334] "Generic (PLEG): container finished" podID="dc70bbe1-6f64-4501-b4cd-afd381a50e86" containerID="da240b8885ee249428d29acbcca2a09d0ef6adeb5bedbf84c08e6707aecb91d1" exitCode=0 Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.424541 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9qd4n" event={"ID":"dc70bbe1-6f64-4501-b4cd-afd381a50e86","Type":"ContainerDied","Data":"da240b8885ee249428d29acbcca2a09d0ef6adeb5bedbf84c08e6707aecb91d1"} Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.424576 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9qd4n" event={"ID":"dc70bbe1-6f64-4501-b4cd-afd381a50e86","Type":"ContainerDied","Data":"cc7b97520541c492eb3ef7a4fd5143c6533366a95d881127774eca463084a523"} Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.424590 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc7b97520541c492eb3ef7a4fd5143c6533366a95d881127774eca463084a523" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.427333 4711 generic.go:334] "Generic (PLEG): container finished" podID="303af9ef-3014-4b33-ba8e-f6b4a9227485" containerID="6bf029d6e69febaae87ae3a440c7a5630dd2120cfbd6f99db0fe2f896ee02e4b" exitCode=0 Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.427372 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shg2k" event={"ID":"303af9ef-3014-4b33-ba8e-f6b4a9227485","Type":"ContainerDied","Data":"6bf029d6e69febaae87ae3a440c7a5630dd2120cfbd6f99db0fe2f896ee02e4b"} Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.427397 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shg2k" event={"ID":"303af9ef-3014-4b33-ba8e-f6b4a9227485","Type":"ContainerDied","Data":"d80e561ae6963985ee3c71c36b0ac91dfd903475c5accd3c5e70d3816c5e8d81"} Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.427410 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d80e561ae6963985ee3c71c36b0ac91dfd903475c5accd3c5e70d3816c5e8d81" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.458987 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hzv6z"] Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.500324 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.516741 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qq59p" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.524812 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-shg2k" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.532564 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9qd4n" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.546292 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7xzhz" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.550075 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q5bgt" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.555329 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-885xw" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.579782 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mq9gk\" (UniqueName: \"kubernetes.io/projected/8b4e79a6-ff8b-4293-931b-bde9f25b7576-kube-api-access-mq9gk\") pod \"8b4e79a6-ff8b-4293-931b-bde9f25b7576\" (UID: \"8b4e79a6-ff8b-4293-931b-bde9f25b7576\") " Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.579978 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ef23d02-cc65-4020-897b-3e114c07d801-catalog-content\") pod \"9ef23d02-cc65-4020-897b-3e114c07d801\" (UID: \"9ef23d02-cc65-4020-897b-3e114c07d801\") " Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.580086 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xd88\" (UniqueName: \"kubernetes.io/projected/ac8209ab-0e14-4fed-9dcc-0978176748a1-kube-api-access-5xd88\") pod \"ac8209ab-0e14-4fed-9dcc-0978176748a1\" (UID: \"ac8209ab-0e14-4fed-9dcc-0978176748a1\") " Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.580140 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/303af9ef-3014-4b33-ba8e-f6b4a9227485-utilities\") pod \"303af9ef-3014-4b33-ba8e-f6b4a9227485\" (UID: \"303af9ef-3014-4b33-ba8e-f6b4a9227485\") " Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.580172 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc70bbe1-6f64-4501-b4cd-afd381a50e86-utilities\") pod \"dc70bbe1-6f64-4501-b4cd-afd381a50e86\" (UID: \"dc70bbe1-6f64-4501-b4cd-afd381a50e86\") " Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.580192 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5qdz\" (UniqueName: \"kubernetes.io/projected/dc70bbe1-6f64-4501-b4cd-afd381a50e86-kube-api-access-s5qdz\") pod \"dc70bbe1-6f64-4501-b4cd-afd381a50e86\" (UID: \"dc70bbe1-6f64-4501-b4cd-afd381a50e86\") " Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.580271 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a16e14b-953f-491c-9986-b5bafcf8cd0b-utilities\") pod \"0a16e14b-953f-491c-9986-b5bafcf8cd0b\" (UID: \"0a16e14b-953f-491c-9986-b5bafcf8cd0b\") " Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.580293 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8mls\" (UniqueName: \"kubernetes.io/projected/303af9ef-3014-4b33-ba8e-f6b4a9227485-kube-api-access-w8mls\") pod \"303af9ef-3014-4b33-ba8e-f6b4a9227485\" (UID: \"303af9ef-3014-4b33-ba8e-f6b4a9227485\") " Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.580335 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f17fdf7-38ee-4c60-a0e9-e293cdd77830-utilities\") pod \"8f17fdf7-38ee-4c60-a0e9-e293cdd77830\" (UID: \"8f17fdf7-38ee-4c60-a0e9-e293cdd77830\") " Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.580358 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a16e14b-953f-491c-9986-b5bafcf8cd0b-catalog-content\") pod \"0a16e14b-953f-491c-9986-b5bafcf8cd0b\" (UID: \"0a16e14b-953f-491c-9986-b5bafcf8cd0b\") " Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.580410 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f17fdf7-38ee-4c60-a0e9-e293cdd77830-catalog-content\") pod \"8f17fdf7-38ee-4c60-a0e9-e293cdd77830\" (UID: \"8f17fdf7-38ee-4c60-a0e9-e293cdd77830\") " Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.580435 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8b4e79a6-ff8b-4293-931b-bde9f25b7576-marketplace-operator-metrics\") pod \"8b4e79a6-ff8b-4293-931b-bde9f25b7576\" (UID: \"8b4e79a6-ff8b-4293-931b-bde9f25b7576\") " Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.580482 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc70bbe1-6f64-4501-b4cd-afd381a50e86-catalog-content\") pod \"dc70bbe1-6f64-4501-b4cd-afd381a50e86\" (UID: \"dc70bbe1-6f64-4501-b4cd-afd381a50e86\") " Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.580534 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9w8v8\" (UniqueName: \"kubernetes.io/projected/8f17fdf7-38ee-4c60-a0e9-e293cdd77830-kube-api-access-9w8v8\") pod \"8f17fdf7-38ee-4c60-a0e9-e293cdd77830\" (UID: \"8f17fdf7-38ee-4c60-a0e9-e293cdd77830\") " Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.580557 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2c8fv\" (UniqueName: \"kubernetes.io/projected/9ef23d02-cc65-4020-897b-3e114c07d801-kube-api-access-2c8fv\") pod \"9ef23d02-cc65-4020-897b-3e114c07d801\" (UID: \"9ef23d02-cc65-4020-897b-3e114c07d801\") " Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.580577 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ef23d02-cc65-4020-897b-3e114c07d801-utilities\") pod \"9ef23d02-cc65-4020-897b-3e114c07d801\" (UID: \"9ef23d02-cc65-4020-897b-3e114c07d801\") " Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.580627 4711 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/303af9ef-3014-4b33-ba8e-f6b4a9227485-catalog-content\") pod \"303af9ef-3014-4b33-ba8e-f6b4a9227485\" (UID: \"303af9ef-3014-4b33-ba8e-f6b4a9227485\") " Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.580912 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac8209ab-0e14-4fed-9dcc-0978176748a1-catalog-content\") pod \"ac8209ab-0e14-4fed-9dcc-0978176748a1\" (UID: \"ac8209ab-0e14-4fed-9dcc-0978176748a1\") " Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.580935 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac8209ab-0e14-4fed-9dcc-0978176748a1-utilities\") pod \"ac8209ab-0e14-4fed-9dcc-0978176748a1\" (UID: \"ac8209ab-0e14-4fed-9dcc-0978176748a1\") " Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.581066 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8b4e79a6-ff8b-4293-931b-bde9f25b7576-marketplace-trusted-ca\") pod \"8b4e79a6-ff8b-4293-931b-bde9f25b7576\" (UID: \"8b4e79a6-ff8b-4293-931b-bde9f25b7576\") " Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.581161 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jkvl\" (UniqueName: \"kubernetes.io/projected/0a16e14b-953f-491c-9986-b5bafcf8cd0b-kube-api-access-9jkvl\") pod \"0a16e14b-953f-491c-9986-b5bafcf8cd0b\" (UID: \"0a16e14b-953f-491c-9986-b5bafcf8cd0b\") " Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.582190 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/303af9ef-3014-4b33-ba8e-f6b4a9227485-utilities" (OuterVolumeSpecName: "utilities") pod "303af9ef-3014-4b33-ba8e-f6b4a9227485" (UID: "303af9ef-3014-4b33-ba8e-f6b4a9227485"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.582516 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a16e14b-953f-491c-9986-b5bafcf8cd0b-utilities" (OuterVolumeSpecName: "utilities") pod "0a16e14b-953f-491c-9986-b5bafcf8cd0b" (UID: "0a16e14b-953f-491c-9986-b5bafcf8cd0b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.583299 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc70bbe1-6f64-4501-b4cd-afd381a50e86-utilities" (OuterVolumeSpecName: "utilities") pod "dc70bbe1-6f64-4501-b4cd-afd381a50e86" (UID: "dc70bbe1-6f64-4501-b4cd-afd381a50e86"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.584425 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f17fdf7-38ee-4c60-a0e9-e293cdd77830-utilities" (OuterVolumeSpecName: "utilities") pod "8f17fdf7-38ee-4c60-a0e9-e293cdd77830" (UID: "8f17fdf7-38ee-4c60-a0e9-e293cdd77830"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.585988 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ef23d02-cc65-4020-897b-3e114c07d801-utilities" (OuterVolumeSpecName: "utilities") pod "9ef23d02-cc65-4020-897b-3e114c07d801" (UID: "9ef23d02-cc65-4020-897b-3e114c07d801"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.586620 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac8209ab-0e14-4fed-9dcc-0978176748a1-utilities" (OuterVolumeSpecName: "utilities") pod "ac8209ab-0e14-4fed-9dcc-0978176748a1" (UID: "ac8209ab-0e14-4fed-9dcc-0978176748a1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.587439 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b4e79a6-ff8b-4293-931b-bde9f25b7576-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "8b4e79a6-ff8b-4293-931b-bde9f25b7576" (UID: "8b4e79a6-ff8b-4293-931b-bde9f25b7576"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.599106 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc70bbe1-6f64-4501-b4cd-afd381a50e86-kube-api-access-s5qdz" (OuterVolumeSpecName: "kube-api-access-s5qdz") pod "dc70bbe1-6f64-4501-b4cd-afd381a50e86" (UID: "dc70bbe1-6f64-4501-b4cd-afd381a50e86"). InnerVolumeSpecName "kube-api-access-s5qdz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.600296 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/303af9ef-3014-4b33-ba8e-f6b4a9227485-kube-api-access-w8mls" (OuterVolumeSpecName: "kube-api-access-w8mls") pod "303af9ef-3014-4b33-ba8e-f6b4a9227485" (UID: "303af9ef-3014-4b33-ba8e-f6b4a9227485"). InnerVolumeSpecName "kube-api-access-w8mls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.609134 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b4e79a6-ff8b-4293-931b-bde9f25b7576-kube-api-access-mq9gk" (OuterVolumeSpecName: "kube-api-access-mq9gk") pod "8b4e79a6-ff8b-4293-931b-bde9f25b7576" (UID: "8b4e79a6-ff8b-4293-931b-bde9f25b7576"). InnerVolumeSpecName "kube-api-access-mq9gk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.609780 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f17fdf7-38ee-4c60-a0e9-e293cdd77830-kube-api-access-9w8v8" (OuterVolumeSpecName: "kube-api-access-9w8v8") pod "8f17fdf7-38ee-4c60-a0e9-e293cdd77830" (UID: "8f17fdf7-38ee-4c60-a0e9-e293cdd77830"). InnerVolumeSpecName "kube-api-access-9w8v8". 
PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.611216 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b4e79a6-ff8b-4293-931b-bde9f25b7576-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "8b4e79a6-ff8b-4293-931b-bde9f25b7576" (UID: "8b4e79a6-ff8b-4293-931b-bde9f25b7576"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.611731 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ef23d02-cc65-4020-897b-3e114c07d801-kube-api-access-2c8fv" (OuterVolumeSpecName: "kube-api-access-2c8fv") pod "9ef23d02-cc65-4020-897b-3e114c07d801" (UID: "9ef23d02-cc65-4020-897b-3e114c07d801"). InnerVolumeSpecName "kube-api-access-2c8fv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.612681 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac8209ab-0e14-4fed-9dcc-0978176748a1-kube-api-access-5xd88" (OuterVolumeSpecName: "kube-api-access-5xd88") pod "ac8209ab-0e14-4fed-9dcc-0978176748a1" (UID: "ac8209ab-0e14-4fed-9dcc-0978176748a1"). InnerVolumeSpecName "kube-api-access-5xd88". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.619771 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a16e14b-953f-491c-9986-b5bafcf8cd0b-kube-api-access-9jkvl" (OuterVolumeSpecName: "kube-api-access-9jkvl") pod "0a16e14b-953f-491c-9986-b5bafcf8cd0b" (UID: "0a16e14b-953f-491c-9986-b5bafcf8cd0b"). InnerVolumeSpecName "kube-api-access-9jkvl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.634586 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f17fdf7-38ee-4c60-a0e9-e293cdd77830-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8f17fdf7-38ee-4c60-a0e9-e293cdd77830" (UID: "8f17fdf7-38ee-4c60-a0e9-e293cdd77830"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.636463 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kqptv"
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.646423 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc70bbe1-6f64-4501-b4cd-afd381a50e86-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dc70bbe1-6f64-4501-b4cd-afd381a50e86" (UID: "dc70bbe1-6f64-4501-b4cd-afd381a50e86"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.656528 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a16e14b-953f-491c-9986-b5bafcf8cd0b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0a16e14b-953f-491c-9986-b5bafcf8cd0b" (UID: "0a16e14b-953f-491c-9986-b5bafcf8cd0b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.680890 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac8209ab-0e14-4fed-9dcc-0978176748a1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ac8209ab-0e14-4fed-9dcc-0978176748a1" (UID: "ac8209ab-0e14-4fed-9dcc-0978176748a1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.680994 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/303af9ef-3014-4b33-ba8e-f6b4a9227485-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "303af9ef-3014-4b33-ba8e-f6b4a9227485" (UID: "303af9ef-3014-4b33-ba8e-f6b4a9227485"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.683332 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thkcp\" (UniqueName: \"kubernetes.io/projected/3c8ded03-464c-4a85-8468-067607680129-kube-api-access-thkcp\") pod \"3c8ded03-464c-4a85-8468-067607680129\" (UID: \"3c8ded03-464c-4a85-8468-067607680129\") "
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.683400 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c8ded03-464c-4a85-8468-067607680129-utilities\") pod \"3c8ded03-464c-4a85-8468-067607680129\" (UID: \"3c8ded03-464c-4a85-8468-067607680129\") "
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.683556 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c8ded03-464c-4a85-8468-067607680129-catalog-content\") pod \"3c8ded03-464c-4a85-8468-067607680129\" (UID: \"3c8ded03-464c-4a85-8468-067607680129\") "
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.683892 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xd88\" (UniqueName: \"kubernetes.io/projected/ac8209ab-0e14-4fed-9dcc-0978176748a1-kube-api-access-5xd88\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.683917 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/303af9ef-3014-4b33-ba8e-f6b4a9227485-utilities\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.683928 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc70bbe1-6f64-4501-b4cd-afd381a50e86-utilities\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.683943 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5qdz\" (UniqueName: \"kubernetes.io/projected/dc70bbe1-6f64-4501-b4cd-afd381a50e86-kube-api-access-s5qdz\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.683956 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a16e14b-953f-491c-9986-b5bafcf8cd0b-utilities\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.683967 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8mls\" (UniqueName: \"kubernetes.io/projected/303af9ef-3014-4b33-ba8e-f6b4a9227485-kube-api-access-w8mls\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.683978 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f17fdf7-38ee-4c60-a0e9-e293cdd77830-utilities\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.683989 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a16e14b-953f-491c-9986-b5bafcf8cd0b-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.683999 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f17fdf7-38ee-4c60-a0e9-e293cdd77830-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.684010 4711 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8b4e79a6-ff8b-4293-931b-bde9f25b7576-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.684021 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2c8fv\" (UniqueName: \"kubernetes.io/projected/9ef23d02-cc65-4020-897b-3e114c07d801-kube-api-access-2c8fv\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.684032 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc70bbe1-6f64-4501-b4cd-afd381a50e86-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.684044 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9w8v8\" (UniqueName: \"kubernetes.io/projected/8f17fdf7-38ee-4c60-a0e9-e293cdd77830-kube-api-access-9w8v8\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.684055 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ef23d02-cc65-4020-897b-3e114c07d801-utilities\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.684067 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/303af9ef-3014-4b33-ba8e-f6b4a9227485-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.684077 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac8209ab-0e14-4fed-9dcc-0978176748a1-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.684087 4711 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8b4e79a6-ff8b-4293-931b-bde9f25b7576-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.684099 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac8209ab-0e14-4fed-9dcc-0978176748a1-utilities\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.684111 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jkvl\" (UniqueName: \"kubernetes.io/projected/0a16e14b-953f-491c-9986-b5bafcf8cd0b-kube-api-access-9jkvl\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.684122 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mq9gk\" (UniqueName: \"kubernetes.io/projected/8b4e79a6-ff8b-4293-931b-bde9f25b7576-kube-api-access-mq9gk\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.685629 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c8ded03-464c-4a85-8468-067607680129-utilities" (OuterVolumeSpecName: "utilities") pod "3c8ded03-464c-4a85-8468-067607680129" (UID: "3c8ded03-464c-4a85-8468-067607680129"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.686980 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c8ded03-464c-4a85-8468-067607680129-kube-api-access-thkcp" (OuterVolumeSpecName: "kube-api-access-thkcp") pod "3c8ded03-464c-4a85-8468-067607680129" (UID: "3c8ded03-464c-4a85-8468-067607680129"). InnerVolumeSpecName "kube-api-access-thkcp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.729894 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ef23d02-cc65-4020-897b-3e114c07d801-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9ef23d02-cc65-4020-897b-3e114c07d801" (UID: "9ef23d02-cc65-4020-897b-3e114c07d801"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.786250 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thkcp\" (UniqueName: \"kubernetes.io/projected/3c8ded03-464c-4a85-8468-067607680129-kube-api-access-thkcp\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.786317 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c8ded03-464c-4a85-8468-067607680129-utilities\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.786334 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ef23d02-cc65-4020-897b-3e114c07d801-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.801269 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c8ded03-464c-4a85-8468-067607680129-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3c8ded03-464c-4a85-8468-067607680129" (UID: "3c8ded03-464c-4a85-8468-067607680129"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:25:28 crc kubenswrapper[4711]: I0123 08:25:28.887812 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c8ded03-464c-4a85-8468-067607680129-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.434704 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-885xw" event={"ID":"9ef23d02-cc65-4020-897b-3e114c07d801","Type":"ContainerDied","Data":"02009f5c2e13aa2f4851fe5ff42e8966ed046a60f0c36498af5f108f8f60720d"}
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.434761 4711 scope.go:117] "RemoveContainer" containerID="7ed25de54c7ac3f30e02bac19b14adfa13f4b548b199038259df1e4936d6aec3"
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.434917 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-885xw"
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.438556 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kqptv" event={"ID":"3c8ded03-464c-4a85-8468-067607680129","Type":"ContainerDied","Data":"0d48bf623af35170c54f120f1e9916f9a5d75bdf427f2c5defb7a02767b52c76"}
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.438672 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kqptv"
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.440395 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7xzhz"
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.440409 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hzv6z" event={"ID":"a9f6b0c2-4bc3-4bf5-987e-93b1f6ea0520","Type":"ContainerStarted","Data":"014f434971fe6603a697045de10d0150177860aebb559f94aac2b70600f950ad"}
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.440434 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hzv6z" event={"ID":"a9f6b0c2-4bc3-4bf5-987e-93b1f6ea0520","Type":"ContainerStarted","Data":"f2dd3a6c0298b01f06c7d041e800c3534c05515ec98e75e47e866b80053e73a5"}
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.440399 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9qd4n"
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.440453 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-shg2k"
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.440473 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q5bgt"
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.440489 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qq59p"
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.440744 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rhrsx"
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.440911 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-hzv6z"
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.444312 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-hzv6z"
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.462264 4711 scope.go:117] "RemoveContainer" containerID="63addd9321b55d26b9624e91027963f213c3228268dff7adf9855543e5230989"
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.480887 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-hzv6z" podStartSLOduration=2.480863789 podStartE2EDuration="2.480863789s" podCreationTimestamp="2026-01-23 08:25:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:25:29.474599566 +0000 UTC m=+315.047555954" watchObservedRunningTime="2026-01-23 08:25:29.480863789 +0000 UTC m=+315.053820157"
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.496680 4711 scope.go:117] "RemoveContainer" containerID="032013d65a98f52d05eef1cb29427306437c6a4885549ffb763ec167e9be582d"
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.515902 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-885xw"]
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.523403 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-885xw"]
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.531769 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rhrsx"]
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.537819 4711 scope.go:117] "RemoveContainer" containerID="dbe9cb549bee3b6070e10ca9a47cc03f0babb2de19acd8068614105f6fa7154a"
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.540078 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rhrsx"]
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.547265 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9qd4n"]
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.552907 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9qd4n"]
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.559079 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qq59p"]
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.561019 4711 scope.go:117] "RemoveContainer" containerID="2f48a6b83df4f3dab6dc0ac340d77e6c996886e38bde3ceebbc3d65bc936e6a5"
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.574110 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qq59p"]
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.577645 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-shg2k"]
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.580049 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-shg2k"]
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.583084 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7xzhz"]
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.584889 4711 scope.go:117] "RemoveContainer" containerID="4131b1d0cce1d3e24ba46beba0b306cb7fb435d51f024c62fe66cd2f24da5efe"
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.586378 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7xzhz"]
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.592275 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q5bgt"]
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.595830 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-q5bgt"]
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.606018 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kqptv"]
Jan 23 08:25:29 crc kubenswrapper[4711]: I0123 08:25:29.609016 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-kqptv"]
Jan 23 08:25:31 crc kubenswrapper[4711]: I0123 08:25:31.485099 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a16e14b-953f-491c-9986-b5bafcf8cd0b" path="/var/lib/kubelet/pods/0a16e14b-953f-491c-9986-b5bafcf8cd0b/volumes"
Jan 23 08:25:31 crc kubenswrapper[4711]: I0123 08:25:31.486440 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="303af9ef-3014-4b33-ba8e-f6b4a9227485" path="/var/lib/kubelet/pods/303af9ef-3014-4b33-ba8e-f6b4a9227485/volumes"
Jan 23 08:25:31 crc kubenswrapper[4711]: I0123 08:25:31.487724 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c8ded03-464c-4a85-8468-067607680129" path="/var/lib/kubelet/pods/3c8ded03-464c-4a85-8468-067607680129/volumes"
Jan 23 08:25:31 crc kubenswrapper[4711]: I0123 08:25:31.489896 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b4e79a6-ff8b-4293-931b-bde9f25b7576" path="/var/lib/kubelet/pods/8b4e79a6-ff8b-4293-931b-bde9f25b7576/volumes"
Jan 23 08:25:31 crc kubenswrapper[4711]: I0123 08:25:31.490835 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f17fdf7-38ee-4c60-a0e9-e293cdd77830" path="/var/lib/kubelet/pods/8f17fdf7-38ee-4c60-a0e9-e293cdd77830/volumes"
Jan 23 08:25:31 crc kubenswrapper[4711]: I0123 08:25:31.492798 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ef23d02-cc65-4020-897b-3e114c07d801" path="/var/lib/kubelet/pods/9ef23d02-cc65-4020-897b-3e114c07d801/volumes"
Jan 23 08:25:31 crc kubenswrapper[4711]: I0123 08:25:31.494844 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac8209ab-0e14-4fed-9dcc-0978176748a1" path="/var/lib/kubelet/pods/ac8209ab-0e14-4fed-9dcc-0978176748a1/volumes"
Jan 23 08:25:31 crc kubenswrapper[4711]: I0123 08:25:31.496624 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc70bbe1-6f64-4501-b4cd-afd381a50e86" path="/var/lib/kubelet/pods/dc70bbe1-6f64-4501-b4cd-afd381a50e86/volumes"
Jan 23 08:25:37 crc kubenswrapper[4711]: I0123 08:25:37.790367 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vsck7"]
Jan 23 08:25:37 crc kubenswrapper[4711]: I0123 08:25:37.791245 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7" podUID="b328000b-5587-4645-a3b6-02397de51cf6" containerName="controller-manager" containerID="cri-o://e09f43dbf78d38d2f610d845ed1964c7ab19280b10430b0f68a2f5925492bfaa" gracePeriod=30
Jan 23 08:25:37 crc kubenswrapper[4711]: I0123 08:25:37.834500 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp"]
Jan 23 08:25:37 crc kubenswrapper[4711]: I0123 08:25:37.834743 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp" podUID="86145ace-e2d3-4b5b-9475-f52b19faa9df" containerName="route-controller-manager" containerID="cri-o://afadebbd640434a996a5a893e437e307dfc813dbccdae4ea361bb3cb6e790178" gracePeriod=30
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.494167 4711 generic.go:334] "Generic (PLEG): container finished" podID="b328000b-5587-4645-a3b6-02397de51cf6" containerID="e09f43dbf78d38d2f610d845ed1964c7ab19280b10430b0f68a2f5925492bfaa" exitCode=0
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.494386 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7" event={"ID":"b328000b-5587-4645-a3b6-02397de51cf6","Type":"ContainerDied","Data":"e09f43dbf78d38d2f610d845ed1964c7ab19280b10430b0f68a2f5925492bfaa"}
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.498728 4711 generic.go:334] "Generic (PLEG): container finished" podID="86145ace-e2d3-4b5b-9475-f52b19faa9df" containerID="afadebbd640434a996a5a893e437e307dfc813dbccdae4ea361bb3cb6e790178" exitCode=0
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.498754 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp" event={"ID":"86145ace-e2d3-4b5b-9475-f52b19faa9df","Type":"ContainerDied","Data":"afadebbd640434a996a5a893e437e307dfc813dbccdae4ea361bb3cb6e790178"}
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.498769 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp" event={"ID":"86145ace-e2d3-4b5b-9475-f52b19faa9df","Type":"ContainerDied","Data":"8e718114153b12f96e59f9f472054b60b61e2b732324fe090bc870ee8f6041a5"}
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.498779 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e718114153b12f96e59f9f472054b60b61e2b732324fe090bc870ee8f6041a5"
Jan 23 08:25:38 crc kubenswrapper[4711]: E0123 08:25:38.573597 4711 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb328000b_5587_4645_a3b6_02397de51cf6.slice/crio-e09f43dbf78d38d2f610d845ed1964c7ab19280b10430b0f68a2f5925492bfaa.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb328000b_5587_4645_a3b6_02397de51cf6.slice/crio-conmon-e09f43dbf78d38d2f610d845ed1964c7ab19280b10430b0f68a2f5925492bfaa.scope\": RecentStats: unable to find data in memory cache]"
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.607485 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp"
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.669972 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7"
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.764029 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/86145ace-e2d3-4b5b-9475-f52b19faa9df-client-ca\") pod \"86145ace-e2d3-4b5b-9475-f52b19faa9df\" (UID: \"86145ace-e2d3-4b5b-9475-f52b19faa9df\") "
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.764069 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86145ace-e2d3-4b5b-9475-f52b19faa9df-config\") pod \"86145ace-e2d3-4b5b-9475-f52b19faa9df\" (UID: \"86145ace-e2d3-4b5b-9475-f52b19faa9df\") "
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.764113 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/86145ace-e2d3-4b5b-9475-f52b19faa9df-serving-cert\") pod \"86145ace-e2d3-4b5b-9475-f52b19faa9df\" (UID: \"86145ace-e2d3-4b5b-9475-f52b19faa9df\") "
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.764136 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b328000b-5587-4645-a3b6-02397de51cf6-config\") pod \"b328000b-5587-4645-a3b6-02397de51cf6\" (UID: \"b328000b-5587-4645-a3b6-02397de51cf6\") "
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.764202 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9kpfq\" (UniqueName: \"kubernetes.io/projected/86145ace-e2d3-4b5b-9475-f52b19faa9df-kube-api-access-9kpfq\") pod \"86145ace-e2d3-4b5b-9475-f52b19faa9df\" (UID: \"86145ace-e2d3-4b5b-9475-f52b19faa9df\") "
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.764238 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgf6g\" (UniqueName: \"kubernetes.io/projected/b328000b-5587-4645-a3b6-02397de51cf6-kube-api-access-sgf6g\") pod \"b328000b-5587-4645-a3b6-02397de51cf6\" (UID: \"b328000b-5587-4645-a3b6-02397de51cf6\") "
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.764271 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b328000b-5587-4645-a3b6-02397de51cf6-client-ca\") pod \"b328000b-5587-4645-a3b6-02397de51cf6\" (UID: \"b328000b-5587-4645-a3b6-02397de51cf6\") "
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.764301 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b328000b-5587-4645-a3b6-02397de51cf6-serving-cert\") pod \"b328000b-5587-4645-a3b6-02397de51cf6\" (UID: \"b328000b-5587-4645-a3b6-02397de51cf6\") "
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.764342 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b328000b-5587-4645-a3b6-02397de51cf6-proxy-ca-bundles\") pod \"b328000b-5587-4645-a3b6-02397de51cf6\" (UID: \"b328000b-5587-4645-a3b6-02397de51cf6\") "
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.765056 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86145ace-e2d3-4b5b-9475-f52b19faa9df-client-ca" (OuterVolumeSpecName: "client-ca") pod "86145ace-e2d3-4b5b-9475-f52b19faa9df" (UID: "86145ace-e2d3-4b5b-9475-f52b19faa9df"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.765133 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b328000b-5587-4645-a3b6-02397de51cf6-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "b328000b-5587-4645-a3b6-02397de51cf6" (UID: "b328000b-5587-4645-a3b6-02397de51cf6"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.765125 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b328000b-5587-4645-a3b6-02397de51cf6-client-ca" (OuterVolumeSpecName: "client-ca") pod "b328000b-5587-4645-a3b6-02397de51cf6" (UID: "b328000b-5587-4645-a3b6-02397de51cf6"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.765493 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86145ace-e2d3-4b5b-9475-f52b19faa9df-config" (OuterVolumeSpecName: "config") pod "86145ace-e2d3-4b5b-9475-f52b19faa9df" (UID: "86145ace-e2d3-4b5b-9475-f52b19faa9df"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.765722 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b328000b-5587-4645-a3b6-02397de51cf6-config" (OuterVolumeSpecName: "config") pod "b328000b-5587-4645-a3b6-02397de51cf6" (UID: "b328000b-5587-4645-a3b6-02397de51cf6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.769480 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86145ace-e2d3-4b5b-9475-f52b19faa9df-kube-api-access-9kpfq" (OuterVolumeSpecName: "kube-api-access-9kpfq") pod "86145ace-e2d3-4b5b-9475-f52b19faa9df" (UID: "86145ace-e2d3-4b5b-9475-f52b19faa9df"). InnerVolumeSpecName "kube-api-access-9kpfq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.769804 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86145ace-e2d3-4b5b-9475-f52b19faa9df-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "86145ace-e2d3-4b5b-9475-f52b19faa9df" (UID: "86145ace-e2d3-4b5b-9475-f52b19faa9df"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.769869 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b328000b-5587-4645-a3b6-02397de51cf6-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b328000b-5587-4645-a3b6-02397de51cf6" (UID: "b328000b-5587-4645-a3b6-02397de51cf6"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.770585 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b328000b-5587-4645-a3b6-02397de51cf6-kube-api-access-sgf6g" (OuterVolumeSpecName: "kube-api-access-sgf6g") pod "b328000b-5587-4645-a3b6-02397de51cf6" (UID: "b328000b-5587-4645-a3b6-02397de51cf6"). InnerVolumeSpecName "kube-api-access-sgf6g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.865312 4711 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/86145ace-e2d3-4b5b-9475-f52b19faa9df-client-ca\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.865343 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86145ace-e2d3-4b5b-9475-f52b19faa9df-config\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.865351 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/86145ace-e2d3-4b5b-9475-f52b19faa9df-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.865359 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b328000b-5587-4645-a3b6-02397de51cf6-config\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.865368 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9kpfq\" (UniqueName: \"kubernetes.io/projected/86145ace-e2d3-4b5b-9475-f52b19faa9df-kube-api-access-9kpfq\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.865378 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgf6g\" (UniqueName: \"kubernetes.io/projected/b328000b-5587-4645-a3b6-02397de51cf6-kube-api-access-sgf6g\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.865386 4711 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b328000b-5587-4645-a3b6-02397de51cf6-client-ca\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.865394 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b328000b-5587-4645-a3b6-02397de51cf6-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:38 crc kubenswrapper[4711]: I0123 08:25:38.865401 4711 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b328000b-5587-4645-a3b6-02397de51cf6-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.123663 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"]
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.123951 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c8ded03-464c-4a85-8468-067607680129" containerName="extract-utilities"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.123965 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c8ded03-464c-4a85-8468-067607680129" containerName="extract-utilities"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.123977 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc70bbe1-6f64-4501-b4cd-afd381a50e86" containerName="extract-content"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.123987 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc70bbe1-6f64-4501-b4cd-afd381a50e86" containerName="extract-content"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124001 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="303af9ef-3014-4b33-ba8e-f6b4a9227485" containerName="registry-server"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124009 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="303af9ef-3014-4b33-ba8e-f6b4a9227485" containerName="registry-server"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124019 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c8ded03-464c-4a85-8468-067607680129" containerName="registry-server"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124028 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c8ded03-464c-4a85-8468-067607680129" containerName="registry-server"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124037 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86145ace-e2d3-4b5b-9475-f52b19faa9df" containerName="route-controller-manager"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124045 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="86145ace-e2d3-4b5b-9475-f52b19faa9df" containerName="route-controller-manager"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124058 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="303af9ef-3014-4b33-ba8e-f6b4a9227485" containerName="extract-content"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124065 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="303af9ef-3014-4b33-ba8e-f6b4a9227485" containerName="extract-content"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124074 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="303af9ef-3014-4b33-ba8e-f6b4a9227485" containerName="extract-utilities"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124082 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="303af9ef-3014-4b33-ba8e-f6b4a9227485" containerName="extract-utilities"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124093 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f17fdf7-38ee-4c60-a0e9-e293cdd77830" containerName="extract-content"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124102 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f17fdf7-38ee-4c60-a0e9-e293cdd77830" containerName="extract-content"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124114 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c8ded03-464c-4a85-8468-067607680129" containerName="extract-content"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124121 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c8ded03-464c-4a85-8468-067607680129" containerName="extract-content"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124132 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ef23d02-cc65-4020-897b-3e114c07d801" containerName="extract-content"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124139 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ef23d02-cc65-4020-897b-3e114c07d801" containerName="extract-content"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124148 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ef23d02-cc65-4020-897b-3e114c07d801" containerName="registry-server"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124156 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ef23d02-cc65-4020-897b-3e114c07d801" containerName="registry-server"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124170 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac8209ab-0e14-4fed-9dcc-0978176748a1" containerName="registry-server"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124177 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac8209ab-0e14-4fed-9dcc-0978176748a1" containerName="registry-server"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124188 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ef23d02-cc65-4020-897b-3e114c07d801" containerName="extract-utilities"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124196 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ef23d02-cc65-4020-897b-3e114c07d801" containerName="extract-utilities"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124204 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac8209ab-0e14-4fed-9dcc-0978176748a1" containerName="extract-content"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124213 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac8209ab-0e14-4fed-9dcc-0978176748a1" containerName="extract-content"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124221 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f17fdf7-38ee-4c60-a0e9-e293cdd77830" containerName="registry-server"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124228 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f17fdf7-38ee-4c60-a0e9-e293cdd77830" containerName="registry-server"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124244 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a16e14b-953f-491c-9986-b5bafcf8cd0b" containerName="registry-server"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124251 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a16e14b-953f-491c-9986-b5bafcf8cd0b" containerName="registry-server"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124261 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b4e79a6-ff8b-4293-931b-bde9f25b7576" containerName="marketplace-operator"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124269 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b4e79a6-ff8b-4293-931b-bde9f25b7576" containerName="marketplace-operator"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124279 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a16e14b-953f-491c-9986-b5bafcf8cd0b" containerName="extract-content"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124287 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a16e14b-953f-491c-9986-b5bafcf8cd0b" containerName="extract-content"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124299 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc70bbe1-6f64-4501-b4cd-afd381a50e86" containerName="registry-server"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124306 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc70bbe1-6f64-4501-b4cd-afd381a50e86" containerName="registry-server"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124316 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b328000b-5587-4645-a3b6-02397de51cf6" containerName="controller-manager"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124324 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b328000b-5587-4645-a3b6-02397de51cf6" containerName="controller-manager"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124335 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc70bbe1-6f64-4501-b4cd-afd381a50e86" containerName="extract-utilities"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124342 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc70bbe1-6f64-4501-b4cd-afd381a50e86" containerName="extract-utilities"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124351 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac8209ab-0e14-4fed-9dcc-0978176748a1" containerName="extract-utilities"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124358 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac8209ab-0e14-4fed-9dcc-0978176748a1" containerName="extract-utilities"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124370 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f17fdf7-38ee-4c60-a0e9-e293cdd77830" containerName="extract-utilities"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124378 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f17fdf7-38ee-4c60-a0e9-e293cdd77830" containerName="extract-utilities"
Jan 23 08:25:39 crc kubenswrapper[4711]: E0123 08:25:39.124388 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a16e14b-953f-491c-9986-b5bafcf8cd0b" containerName="extract-utilities"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124395 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a16e14b-953f-491c-9986-b5bafcf8cd0b" containerName="extract-utilities"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124494 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac8209ab-0e14-4fed-9dcc-0978176748a1" containerName="registry-server"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124529 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c8ded03-464c-4a85-8468-067607680129" containerName="registry-server"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124540 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="303af9ef-3014-4b33-ba8e-f6b4a9227485" containerName="registry-server"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124550 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f17fdf7-38ee-4c60-a0e9-e293cdd77830" containerName="registry-server"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124560 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="86145ace-e2d3-4b5b-9475-f52b19faa9df" containerName="route-controller-manager"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124570 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b4e79a6-ff8b-4293-931b-bde9f25b7576" containerName="marketplace-operator"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124581 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ef23d02-cc65-4020-897b-3e114c07d801" containerName="registry-server"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124590 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc70bbe1-6f64-4501-b4cd-afd381a50e86" containerName="registry-server"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124598 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="b328000b-5587-4645-a3b6-02397de51cf6" containerName="controller-manager"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.124609 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a16e14b-953f-491c-9986-b5bafcf8cd0b" containerName="registry-server"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.125105 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.126977 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-dbf56b754-fwmr8"]
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.127731 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.134156 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-dbf56b754-fwmr8"]
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.142645 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"]
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.269719 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjmxc\" (UniqueName: \"kubernetes.io/projected/95e6cf0a-1b13-4b60-a526-84a9705274c0-kube-api-access-sjmxc\") pod \"controller-manager-dbf56b754-fwmr8\" (UID: \"95e6cf0a-1b13-4b60-a526-84a9705274c0\") " pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.270120 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/95e6cf0a-1b13-4b60-a526-84a9705274c0-proxy-ca-bundles\") pod \"controller-manager-dbf56b754-fwmr8\" (UID: \"95e6cf0a-1b13-4b60-a526-84a9705274c0\") " pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.270146 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-serving-cert\") pod \"route-controller-manager-98998769b-g8kmr\" (UID: \"5105cff0-d8f7-4b40-9a92-8c02cba33dbe\") " pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.270172 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95e6cf0a-1b13-4b60-a526-84a9705274c0-config\") pod \"controller-manager-dbf56b754-fwmr8\" (UID: \"95e6cf0a-1b13-4b60-a526-84a9705274c0\") " pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.270200 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/95e6cf0a-1b13-4b60-a526-84a9705274c0-serving-cert\") pod \"controller-manager-dbf56b754-fwmr8\" (UID: \"95e6cf0a-1b13-4b60-a526-84a9705274c0\") " pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.270396 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nlv9\" (UniqueName: \"kubernetes.io/projected/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-kube-api-access-8nlv9\") pod \"route-controller-manager-98998769b-g8kmr\" (UID: \"5105cff0-d8f7-4b40-9a92-8c02cba33dbe\") " pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.270454 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-config\") pod \"route-controller-manager-98998769b-g8kmr\" (UID: \"5105cff0-d8f7-4b40-9a92-8c02cba33dbe\") " pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.270494 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-client-ca\") pod \"route-controller-manager-98998769b-g8kmr\" (UID: \"5105cff0-d8f7-4b40-9a92-8c02cba33dbe\") " pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.270545 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/95e6cf0a-1b13-4b60-a526-84a9705274c0-client-ca\") pod \"controller-manager-dbf56b754-fwmr8\" (UID: \"95e6cf0a-1b13-4b60-a526-84a9705274c0\") " pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.371440 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-client-ca\") pod \"route-controller-manager-98998769b-g8kmr\" (UID: \"5105cff0-d8f7-4b40-9a92-8c02cba33dbe\") " pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.371528 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/95e6cf0a-1b13-4b60-a526-84a9705274c0-client-ca\") pod \"controller-manager-dbf56b754-fwmr8\" (UID: \"95e6cf0a-1b13-4b60-a526-84a9705274c0\") " pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.371610 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjmxc\" (UniqueName: \"kubernetes.io/projected/95e6cf0a-1b13-4b60-a526-84a9705274c0-kube-api-access-sjmxc\") pod \"controller-manager-dbf56b754-fwmr8\" (UID: \"95e6cf0a-1b13-4b60-a526-84a9705274c0\") " pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.371652 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/95e6cf0a-1b13-4b60-a526-84a9705274c0-proxy-ca-bundles\") pod \"controller-manager-dbf56b754-fwmr8\" (UID: \"95e6cf0a-1b13-4b60-a526-84a9705274c0\") " pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.371679 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-serving-cert\") pod \"route-controller-manager-98998769b-g8kmr\" (UID: \"5105cff0-d8f7-4b40-9a92-8c02cba33dbe\") " pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.372486 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/95e6cf0a-1b13-4b60-a526-84a9705274c0-client-ca\") pod \"controller-manager-dbf56b754-fwmr8\" (UID: \"95e6cf0a-1b13-4b60-a526-84a9705274c0\") " pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.373147 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-client-ca\") pod \"route-controller-manager-98998769b-g8kmr\" (UID: \"5105cff0-d8f7-4b40-9a92-8c02cba33dbe\") " pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.373330 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/95e6cf0a-1b13-4b60-a526-84a9705274c0-proxy-ca-bundles\") pod \"controller-manager-dbf56b754-fwmr8\" (UID: \"95e6cf0a-1b13-4b60-a526-84a9705274c0\") " pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.373431 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95e6cf0a-1b13-4b60-a526-84a9705274c0-config\") pod \"controller-manager-dbf56b754-fwmr8\" (UID: \"95e6cf0a-1b13-4b60-a526-84a9705274c0\") " pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.374424 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95e6cf0a-1b13-4b60-a526-84a9705274c0-config\") pod \"controller-manager-dbf56b754-fwmr8\" (UID: \"95e6cf0a-1b13-4b60-a526-84a9705274c0\") " pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.373462 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/95e6cf0a-1b13-4b60-a526-84a9705274c0-serving-cert\") pod \"controller-manager-dbf56b754-fwmr8\" (UID: \"95e6cf0a-1b13-4b60-a526-84a9705274c0\") " pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.374533 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nlv9\" (UniqueName: \"kubernetes.io/projected/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-kube-api-access-8nlv9\") pod \"route-controller-manager-98998769b-g8kmr\" (UID: \"5105cff0-d8f7-4b40-9a92-8c02cba33dbe\") " pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.374559 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-config\") pod \"route-controller-manager-98998769b-g8kmr\" (UID: \"5105cff0-d8f7-4b40-9a92-8c02cba33dbe\") " pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.375323 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-config\") pod \"route-controller-manager-98998769b-g8kmr\" (UID: \"5105cff0-d8f7-4b40-9a92-8c02cba33dbe\") " pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.379416 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-serving-cert\") pod \"route-controller-manager-98998769b-g8kmr\" (UID: \"5105cff0-d8f7-4b40-9a92-8c02cba33dbe\") " pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.379871 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/95e6cf0a-1b13-4b60-a526-84a9705274c0-serving-cert\") pod \"controller-manager-dbf56b754-fwmr8\" (UID: \"95e6cf0a-1b13-4b60-a526-84a9705274c0\") " pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.392165 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nlv9\" (UniqueName: \"kubernetes.io/projected/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-kube-api-access-8nlv9\") pod \"route-controller-manager-98998769b-g8kmr\" (UID: \"5105cff0-d8f7-4b40-9a92-8c02cba33dbe\") " pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.392201 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjmxc\" (UniqueName: \"kubernetes.io/projected/95e6cf0a-1b13-4b60-a526-84a9705274c0-kube-api-access-sjmxc\") pod \"controller-manager-dbf56b754-fwmr8\" (UID: \"95e6cf0a-1b13-4b60-a526-84a9705274c0\") " pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.441643 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.461971 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.525189 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.525219 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.525186 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-vsck7" event={"ID":"b328000b-5587-4645-a3b6-02397de51cf6","Type":"ContainerDied","Data":"9f5567ad5d4c80341566c3cbf9b6edff8fe4b7580ddeb2dbc0e6619d6072bd14"}
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.525360 4711 scope.go:117] "RemoveContainer" containerID="e09f43dbf78d38d2f610d845ed1964c7ab19280b10430b0f68a2f5925492bfaa"
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.548330 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vsck7"]
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.558304 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vsck7"]
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.590297 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp"]
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.598494 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-kqstp"]
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.721983 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-dbf56b754-fwmr8"]
Jan 23 08:25:39 crc kubenswrapper[4711]: I0123 08:25:39.955644 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"]
Jan 23 08:25:39 crc kubenswrapper[4711]: W0123 08:25:39.962978 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5105cff0_d8f7_4b40_9a92_8c02cba33dbe.slice/crio-e1f83e64db72e117aa7e4699e71d314867e3a4f149bc66957e75e9fda81c9eac WatchSource:0}: Error finding container e1f83e64db72e117aa7e4699e71d314867e3a4f149bc66957e75e9fda81c9eac: Status 404 returned error can't find the container with id e1f83e64db72e117aa7e4699e71d314867e3a4f149bc66957e75e9fda81c9eac
Jan 23 08:25:40 crc kubenswrapper[4711]: I0123 08:25:40.533414 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr" event={"ID":"5105cff0-d8f7-4b40-9a92-8c02cba33dbe","Type":"ContainerStarted","Data":"7e0cc33d69823ffe41eccfe7122ecae3ccbc4a003425e60d27755b17b1f39442"}
Jan 23 08:25:40 crc kubenswrapper[4711]: I0123 08:25:40.533460 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr" event={"ID":"5105cff0-d8f7-4b40-9a92-8c02cba33dbe","Type":"ContainerStarted","Data":"e1f83e64db72e117aa7e4699e71d314867e3a4f149bc66957e75e9fda81c9eac"}
Jan 23 08:25:40 crc kubenswrapper[4711]: I0123 08:25:40.533719 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"
Jan 23 08:25:40 crc kubenswrapper[4711]: I0123 08:25:40.536267 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8" event={"ID":"95e6cf0a-1b13-4b60-a526-84a9705274c0","Type":"ContainerStarted","Data":"636a5eb1ea1aed0d4526047fac8b69163e9ba8a79cd51b69d06c561f7aabfd00"}
Jan 23 08:25:40 crc kubenswrapper[4711]: I0123 08:25:40.536315 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8" event={"ID":"95e6cf0a-1b13-4b60-a526-84a9705274c0","Type":"ContainerStarted","Data":"b0773b06258d70fc12f16adae18f63ab0569d5a8417096954d54e57f14d3028b"}
Jan 23 08:25:40 crc kubenswrapper[4711]: I0123 08:25:40.536525 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8"
Jan 23 08:25:40 crc kubenswrapper[4711]: I0123 08:25:40.537964 4711 patch_prober.go:28] interesting pod/controller-manager-dbf56b754-fwmr8 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.60:8443/healthz\": dial tcp 10.217.0.60:8443: connect: connection refused" start-of-body=
Jan 23 08:25:40 crc kubenswrapper[4711]: I0123 08:25:40.538028 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8" podUID="95e6cf0a-1b13-4b60-a526-84a9705274c0" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.60:8443/healthz\": dial tcp 10.217.0.60:8443: connect: connection refused"
Jan 23 08:25:40 crc kubenswrapper[4711]: I0123 08:25:40.573730 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr" podStartSLOduration=2.573696742 podStartE2EDuration="2.573696742s" podCreationTimestamp="2026-01-23 08:25:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:25:40.553102866 +0000 UTC m=+326.126059254" watchObservedRunningTime="2026-01-23 08:25:40.573696742 +0000 UTC m=+326.146653110"
Jan 23 08:25:40 crc kubenswrapper[4711]: I0123 08:25:40.575833 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8" podStartSLOduration=2.575824613 podStartE2EDuration="2.575824613s" podCreationTimestamp="2026-01-23 08:25:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:25:40.570691688 +0000 UTC m=+326.143648066" watchObservedRunningTime="2026-01-23 08:25:40.575824613 +0000 UTC m=+326.148780981"
Jan 23 08:25:40 crc kubenswrapper[4711]: I0123 08:25:40.838025 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"
Jan 23 08:25:41 crc kubenswrapper[4711]: I0123 08:25:41.496148 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86145ace-e2d3-4b5b-9475-f52b19faa9df" path="/var/lib/kubelet/pods/86145ace-e2d3-4b5b-9475-f52b19faa9df/volumes"
Jan 23 08:25:41 crc kubenswrapper[4711]: I0123 08:25:41.498839 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b328000b-5587-4645-a3b6-02397de51cf6" path="/var/lib/kubelet/pods/b328000b-5587-4645-a3b6-02397de51cf6/volumes"
Jan 23 08:25:41 crc kubenswrapper[4711]: I0123 08:25:41.546538 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8"
Jan 23 08:25:58 crc kubenswrapper[4711]: I0123 08:25:58.539154 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"]
Jan 23 08:25:58 crc kubenswrapper[4711]: I0123 08:25:58.541144 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr" podUID="5105cff0-d8f7-4b40-9a92-8c02cba33dbe" containerName="route-controller-manager" containerID="cri-o://7e0cc33d69823ffe41eccfe7122ecae3ccbc4a003425e60d27755b17b1f39442" gracePeriod=30
Jan 23 08:25:59 crc kubenswrapper[4711]: I0123 08:25:59.443814 4711 patch_prober.go:28] interesting pod/route-controller-manager-98998769b-g8kmr container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.58:8443/healthz\": dial tcp 10.217.0.58:8443: connect: connection refused" start-of-body=
Jan 23 08:25:59 crc kubenswrapper[4711]: I0123 08:25:59.444163 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr" podUID="5105cff0-d8f7-4b40-9a92-8c02cba33dbe" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.58:8443/healthz\": dial tcp 10.217.0.58:8443: connect: connection refused"
Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.078909 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"
Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.106380 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s"]
Jan 23 08:26:00 crc kubenswrapper[4711]: E0123 08:26:00.106646 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5105cff0-d8f7-4b40-9a92-8c02cba33dbe" containerName="route-controller-manager"
Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.106664 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="5105cff0-d8f7-4b40-9a92-8c02cba33dbe" containerName="route-controller-manager"
Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.106772 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="5105cff0-d8f7-4b40-9a92-8c02cba33dbe" containerName="route-controller-manager"
Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.107254 4711 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.116941 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s"] Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.241216 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-config\") pod \"5105cff0-d8f7-4b40-9a92-8c02cba33dbe\" (UID: \"5105cff0-d8f7-4b40-9a92-8c02cba33dbe\") " Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.241627 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8nlv9\" (UniqueName: \"kubernetes.io/projected/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-kube-api-access-8nlv9\") pod \"5105cff0-d8f7-4b40-9a92-8c02cba33dbe\" (UID: \"5105cff0-d8f7-4b40-9a92-8c02cba33dbe\") " Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.241691 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-client-ca\") pod \"5105cff0-d8f7-4b40-9a92-8c02cba33dbe\" (UID: \"5105cff0-d8f7-4b40-9a92-8c02cba33dbe\") " Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.241794 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-serving-cert\") pod \"5105cff0-d8f7-4b40-9a92-8c02cba33dbe\" (UID: \"5105cff0-d8f7-4b40-9a92-8c02cba33dbe\") " Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.242070 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19b6a7c1-aed8-4d51-9f96-e10870723f3e-serving-cert\") pod \"route-controller-manager-68479564c-gcw9s\" (UID: \"19b6a7c1-aed8-4d51-9f96-e10870723f3e\") " pod="openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.242127 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19b6a7c1-aed8-4d51-9f96-e10870723f3e-config\") pod \"route-controller-manager-68479564c-gcw9s\" (UID: \"19b6a7c1-aed8-4d51-9f96-e10870723f3e\") " pod="openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.242156 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqsdq\" (UniqueName: \"kubernetes.io/projected/19b6a7c1-aed8-4d51-9f96-e10870723f3e-kube-api-access-lqsdq\") pod \"route-controller-manager-68479564c-gcw9s\" (UID: \"19b6a7c1-aed8-4d51-9f96-e10870723f3e\") " pod="openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.242201 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19b6a7c1-aed8-4d51-9f96-e10870723f3e-client-ca\") pod \"route-controller-manager-68479564c-gcw9s\" (UID: \"19b6a7c1-aed8-4d51-9f96-e10870723f3e\") " pod="openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s" Jan 23 08:26:00 crc kubenswrapper[4711]: 
I0123 08:26:00.242397 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-client-ca" (OuterVolumeSpecName: "client-ca") pod "5105cff0-d8f7-4b40-9a92-8c02cba33dbe" (UID: "5105cff0-d8f7-4b40-9a92-8c02cba33dbe"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.242777 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-config" (OuterVolumeSpecName: "config") pod "5105cff0-d8f7-4b40-9a92-8c02cba33dbe" (UID: "5105cff0-d8f7-4b40-9a92-8c02cba33dbe"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.248383 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5105cff0-d8f7-4b40-9a92-8c02cba33dbe" (UID: "5105cff0-d8f7-4b40-9a92-8c02cba33dbe"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.248631 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-kube-api-access-8nlv9" (OuterVolumeSpecName: "kube-api-access-8nlv9") pod "5105cff0-d8f7-4b40-9a92-8c02cba33dbe" (UID: "5105cff0-d8f7-4b40-9a92-8c02cba33dbe"). InnerVolumeSpecName "kube-api-access-8nlv9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.343186 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19b6a7c1-aed8-4d51-9f96-e10870723f3e-serving-cert\") pod \"route-controller-manager-68479564c-gcw9s\" (UID: \"19b6a7c1-aed8-4d51-9f96-e10870723f3e\") " pod="openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.343530 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19b6a7c1-aed8-4d51-9f96-e10870723f3e-config\") pod \"route-controller-manager-68479564c-gcw9s\" (UID: \"19b6a7c1-aed8-4d51-9f96-e10870723f3e\") " pod="openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.343618 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqsdq\" (UniqueName: \"kubernetes.io/projected/19b6a7c1-aed8-4d51-9f96-e10870723f3e-kube-api-access-lqsdq\") pod \"route-controller-manager-68479564c-gcw9s\" (UID: \"19b6a7c1-aed8-4d51-9f96-e10870723f3e\") " pod="openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.343729 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19b6a7c1-aed8-4d51-9f96-e10870723f3e-client-ca\") pod \"route-controller-manager-68479564c-gcw9s\" (UID: \"19b6a7c1-aed8-4d51-9f96-e10870723f3e\") " pod="openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.343843 4711 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-8nlv9\" (UniqueName: \"kubernetes.io/projected/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-kube-api-access-8nlv9\") on node \"crc\" DevicePath \"\"" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.343907 4711 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-client-ca\") on node \"crc\" DevicePath \"\"" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.343962 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.344017 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5105cff0-d8f7-4b40-9a92-8c02cba33dbe-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.344667 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19b6a7c1-aed8-4d51-9f96-e10870723f3e-client-ca\") pod \"route-controller-manager-68479564c-gcw9s\" (UID: \"19b6a7c1-aed8-4d51-9f96-e10870723f3e\") " pod="openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.345045 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19b6a7c1-aed8-4d51-9f96-e10870723f3e-config\") pod \"route-controller-manager-68479564c-gcw9s\" (UID: \"19b6a7c1-aed8-4d51-9f96-e10870723f3e\") " pod="openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.346564 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19b6a7c1-aed8-4d51-9f96-e10870723f3e-serving-cert\") pod \"route-controller-manager-68479564c-gcw9s\" (UID: \"19b6a7c1-aed8-4d51-9f96-e10870723f3e\") " pod="openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.362545 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqsdq\" (UniqueName: \"kubernetes.io/projected/19b6a7c1-aed8-4d51-9f96-e10870723f3e-kube-api-access-lqsdq\") pod \"route-controller-manager-68479564c-gcw9s\" (UID: \"19b6a7c1-aed8-4d51-9f96-e10870723f3e\") " pod="openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.432860 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.643577 4711 generic.go:334] "Generic (PLEG): container finished" podID="5105cff0-d8f7-4b40-9a92-8c02cba33dbe" containerID="7e0cc33d69823ffe41eccfe7122ecae3ccbc4a003425e60d27755b17b1f39442" exitCode=0 Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.643656 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.643653 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr" event={"ID":"5105cff0-d8f7-4b40-9a92-8c02cba33dbe","Type":"ContainerDied","Data":"7e0cc33d69823ffe41eccfe7122ecae3ccbc4a003425e60d27755b17b1f39442"} Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.643741 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr" event={"ID":"5105cff0-d8f7-4b40-9a92-8c02cba33dbe","Type":"ContainerDied","Data":"e1f83e64db72e117aa7e4699e71d314867e3a4f149bc66957e75e9fda81c9eac"} Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.643773 4711 scope.go:117] "RemoveContainer" containerID="7e0cc33d69823ffe41eccfe7122ecae3ccbc4a003425e60d27755b17b1f39442" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.683447 4711 scope.go:117] "RemoveContainer" containerID="7e0cc33d69823ffe41eccfe7122ecae3ccbc4a003425e60d27755b17b1f39442" Jan 23 08:26:00 crc kubenswrapper[4711]: E0123 08:26:00.686327 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e0cc33d69823ffe41eccfe7122ecae3ccbc4a003425e60d27755b17b1f39442\": container with ID starting with 7e0cc33d69823ffe41eccfe7122ecae3ccbc4a003425e60d27755b17b1f39442 not found: ID does not exist" containerID="7e0cc33d69823ffe41eccfe7122ecae3ccbc4a003425e60d27755b17b1f39442" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.686375 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e0cc33d69823ffe41eccfe7122ecae3ccbc4a003425e60d27755b17b1f39442"} err="failed to get container status \"7e0cc33d69823ffe41eccfe7122ecae3ccbc4a003425e60d27755b17b1f39442\": rpc error: code = NotFound desc = could not find container \"7e0cc33d69823ffe41eccfe7122ecae3ccbc4a003425e60d27755b17b1f39442\": container with ID starting with 7e0cc33d69823ffe41eccfe7122ecae3ccbc4a003425e60d27755b17b1f39442 not found: ID does not exist" Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.686431 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"] Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.689734 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-98998769b-g8kmr"] Jan 23 08:26:00 crc kubenswrapper[4711]: I0123 08:26:00.927899 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s"] Jan 23 08:26:00 crc kubenswrapper[4711]: W0123 08:26:00.938537 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19b6a7c1_aed8_4d51_9f96_e10870723f3e.slice/crio-5aaf79a1cc56a0850cea375d229a3a98c3397c36c6f8f19a4bd375ce08098b3f WatchSource:0}: Error finding container 5aaf79a1cc56a0850cea375d229a3a98c3397c36c6f8f19a4bd375ce08098b3f: Status 404 returned error can't find the container with id 5aaf79a1cc56a0850cea375d229a3a98c3397c36c6f8f19a4bd375ce08098b3f Jan 23 08:26:01 crc kubenswrapper[4711]: I0123 08:26:01.479355 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5105cff0-d8f7-4b40-9a92-8c02cba33dbe" 
path="/var/lib/kubelet/pods/5105cff0-d8f7-4b40-9a92-8c02cba33dbe/volumes" Jan 23 08:26:01 crc kubenswrapper[4711]: I0123 08:26:01.651605 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s" event={"ID":"19b6a7c1-aed8-4d51-9f96-e10870723f3e","Type":"ContainerStarted","Data":"259af68146fbe106bae4687094d86ef691308c9540a31e297f6b12c6e0c50e14"} Jan 23 08:26:01 crc kubenswrapper[4711]: I0123 08:26:01.651642 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s" event={"ID":"19b6a7c1-aed8-4d51-9f96-e10870723f3e","Type":"ContainerStarted","Data":"5aaf79a1cc56a0850cea375d229a3a98c3397c36c6f8f19a4bd375ce08098b3f"} Jan 23 08:26:01 crc kubenswrapper[4711]: I0123 08:26:01.653835 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s" Jan 23 08:26:01 crc kubenswrapper[4711]: I0123 08:26:01.670968 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s" podStartSLOduration=3.670952985 podStartE2EDuration="3.670952985s" podCreationTimestamp="2026-01-23 08:25:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:26:01.670882254 +0000 UTC m=+347.243838612" watchObservedRunningTime="2026-01-23 08:26:01.670952985 +0000 UTC m=+347.243909363" Jan 23 08:26:01 crc kubenswrapper[4711]: I0123 08:26:01.931739 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-68479564c-gcw9s" Jan 23 08:26:03 crc kubenswrapper[4711]: I0123 08:26:03.740244 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-hkpkn"] Jan 23 08:26:03 crc kubenswrapper[4711]: I0123 08:26:03.741373 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:03 crc kubenswrapper[4711]: I0123 08:26:03.756234 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-hkpkn"] Jan 23 08:26:03 crc kubenswrapper[4711]: I0123 08:26:03.899942 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bd82e682-2325-4191-9810-17af230ee677-ca-trust-extracted\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:03 crc kubenswrapper[4711]: I0123 08:26:03.899994 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bd82e682-2325-4191-9810-17af230ee677-installation-pull-secrets\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:03 crc kubenswrapper[4711]: I0123 08:26:03.900028 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:03 crc kubenswrapper[4711]: I0123 08:26:03.900058 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bd82e682-2325-4191-9810-17af230ee677-registry-certificates\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:03 crc kubenswrapper[4711]: I0123 08:26:03.900086 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4r7q\" (UniqueName: \"kubernetes.io/projected/bd82e682-2325-4191-9810-17af230ee677-kube-api-access-f4r7q\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:03 crc kubenswrapper[4711]: I0123 08:26:03.900228 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bd82e682-2325-4191-9810-17af230ee677-trusted-ca\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:03 crc kubenswrapper[4711]: I0123 08:26:03.900455 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bd82e682-2325-4191-9810-17af230ee677-registry-tls\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:03 crc kubenswrapper[4711]: I0123 08:26:03.900538 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/bd82e682-2325-4191-9810-17af230ee677-bound-sa-token\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:03 crc kubenswrapper[4711]: I0123 08:26:03.922049 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:04 crc kubenswrapper[4711]: I0123 08:26:04.001946 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bd82e682-2325-4191-9810-17af230ee677-registry-tls\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:04 crc kubenswrapper[4711]: I0123 08:26:04.002011 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bd82e682-2325-4191-9810-17af230ee677-bound-sa-token\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:04 crc kubenswrapper[4711]: I0123 08:26:04.002044 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bd82e682-2325-4191-9810-17af230ee677-ca-trust-extracted\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:04 crc kubenswrapper[4711]: I0123 08:26:04.002066 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bd82e682-2325-4191-9810-17af230ee677-installation-pull-secrets\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:04 crc kubenswrapper[4711]: I0123 08:26:04.002094 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bd82e682-2325-4191-9810-17af230ee677-registry-certificates\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:04 crc kubenswrapper[4711]: I0123 08:26:04.002122 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4r7q\" (UniqueName: \"kubernetes.io/projected/bd82e682-2325-4191-9810-17af230ee677-kube-api-access-f4r7q\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:04 crc kubenswrapper[4711]: I0123 08:26:04.002142 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bd82e682-2325-4191-9810-17af230ee677-trusted-ca\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:04 crc kubenswrapper[4711]: I0123 08:26:04.003139 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bd82e682-2325-4191-9810-17af230ee677-ca-trust-extracted\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:04 crc kubenswrapper[4711]: I0123 08:26:04.003385 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bd82e682-2325-4191-9810-17af230ee677-registry-certificates\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:04 crc kubenswrapper[4711]: I0123 08:26:04.003524 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bd82e682-2325-4191-9810-17af230ee677-trusted-ca\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:04 crc kubenswrapper[4711]: I0123 08:26:04.008712 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bd82e682-2325-4191-9810-17af230ee677-installation-pull-secrets\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:04 crc kubenswrapper[4711]: I0123 08:26:04.008781 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bd82e682-2325-4191-9810-17af230ee677-registry-tls\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:04 crc kubenswrapper[4711]: I0123 08:26:04.024889 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4r7q\" (UniqueName: \"kubernetes.io/projected/bd82e682-2325-4191-9810-17af230ee677-kube-api-access-f4r7q\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:04 crc kubenswrapper[4711]: I0123 08:26:04.025559 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bd82e682-2325-4191-9810-17af230ee677-bound-sa-token\") pod \"image-registry-66df7c8f76-hkpkn\" (UID: \"bd82e682-2325-4191-9810-17af230ee677\") " pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:04 crc kubenswrapper[4711]: I0123 08:26:04.057517 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:04 crc kubenswrapper[4711]: I0123 08:26:04.449123 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-hkpkn"] Jan 23 08:26:04 crc kubenswrapper[4711]: W0123 08:26:04.457656 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbd82e682_2325_4191_9810_17af230ee677.slice/crio-20f2d6dc78cba93a4c41473ba1a56930f9035e6603e35c00b10c381f37569be0 WatchSource:0}: Error finding container 20f2d6dc78cba93a4c41473ba1a56930f9035e6603e35c00b10c381f37569be0: Status 404 returned error can't find the container with id 20f2d6dc78cba93a4c41473ba1a56930f9035e6603e35c00b10c381f37569be0 Jan 23 08:26:04 crc kubenswrapper[4711]: I0123 08:26:04.666703 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" event={"ID":"bd82e682-2325-4191-9810-17af230ee677","Type":"ContainerStarted","Data":"20f2d6dc78cba93a4c41473ba1a56930f9035e6603e35c00b10c381f37569be0"} Jan 23 08:26:05 crc kubenswrapper[4711]: I0123 08:26:05.672656 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" event={"ID":"bd82e682-2325-4191-9810-17af230ee677","Type":"ContainerStarted","Data":"79c1388502978e0fe04133bab040a00ad9505089f58171dd2a3d9fefd66a8668"} Jan 23 08:26:05 crc kubenswrapper[4711]: I0123 08:26:05.672960 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.040300 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" podStartSLOduration=20.040275689 podStartE2EDuration="20.040275689s" podCreationTimestamp="2026-01-23 08:26:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:26:05.691358396 +0000 UTC m=+351.264314764" watchObservedRunningTime="2026-01-23 08:26:23.040275689 +0000 UTC m=+368.613232077" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.045488 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nx6zm"] Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.046982 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nx6zm" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.049942 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.058008 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nx6zm"] Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.190816 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wf7s2\" (UniqueName: \"kubernetes.io/projected/f82925db-b9b1-4c44-8e2f-467607bd171c-kube-api-access-wf7s2\") pod \"redhat-marketplace-nx6zm\" (UID: \"f82925db-b9b1-4c44-8e2f-467607bd171c\") " pod="openshift-marketplace/redhat-marketplace-nx6zm" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.190873 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f82925db-b9b1-4c44-8e2f-467607bd171c-catalog-content\") pod \"redhat-marketplace-nx6zm\" (UID: \"f82925db-b9b1-4c44-8e2f-467607bd171c\") " pod="openshift-marketplace/redhat-marketplace-nx6zm" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.190948 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f82925db-b9b1-4c44-8e2f-467607bd171c-utilities\") pod \"redhat-marketplace-nx6zm\" (UID: \"f82925db-b9b1-4c44-8e2f-467607bd171c\") " pod="openshift-marketplace/redhat-marketplace-nx6zm" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.244837 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7t5gv"] Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.246095 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7t5gv" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.253340 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.255482 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7t5gv"] Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.291765 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wf7s2\" (UniqueName: \"kubernetes.io/projected/f82925db-b9b1-4c44-8e2f-467607bd171c-kube-api-access-wf7s2\") pod \"redhat-marketplace-nx6zm\" (UID: \"f82925db-b9b1-4c44-8e2f-467607bd171c\") " pod="openshift-marketplace/redhat-marketplace-nx6zm" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.291818 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f82925db-b9b1-4c44-8e2f-467607bd171c-catalog-content\") pod \"redhat-marketplace-nx6zm\" (UID: \"f82925db-b9b1-4c44-8e2f-467607bd171c\") " pod="openshift-marketplace/redhat-marketplace-nx6zm" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.291861 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f82925db-b9b1-4c44-8e2f-467607bd171c-utilities\") pod \"redhat-marketplace-nx6zm\" (UID: \"f82925db-b9b1-4c44-8e2f-467607bd171c\") " pod="openshift-marketplace/redhat-marketplace-nx6zm" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.292362 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f82925db-b9b1-4c44-8e2f-467607bd171c-utilities\") pod \"redhat-marketplace-nx6zm\" (UID: \"f82925db-b9b1-4c44-8e2f-467607bd171c\") " pod="openshift-marketplace/redhat-marketplace-nx6zm" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.293109 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f82925db-b9b1-4c44-8e2f-467607bd171c-catalog-content\") pod \"redhat-marketplace-nx6zm\" (UID: \"f82925db-b9b1-4c44-8e2f-467607bd171c\") " pod="openshift-marketplace/redhat-marketplace-nx6zm" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.316178 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wf7s2\" (UniqueName: \"kubernetes.io/projected/f82925db-b9b1-4c44-8e2f-467607bd171c-kube-api-access-wf7s2\") pod \"redhat-marketplace-nx6zm\" (UID: \"f82925db-b9b1-4c44-8e2f-467607bd171c\") " pod="openshift-marketplace/redhat-marketplace-nx6zm" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.393703 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fc0d972-014e-4c07-b699-372362c53774-catalog-content\") pod \"redhat-operators-7t5gv\" (UID: \"5fc0d972-014e-4c07-b699-372362c53774\") " pod="openshift-marketplace/redhat-operators-7t5gv" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.393788 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fc0d972-014e-4c07-b699-372362c53774-utilities\") pod \"redhat-operators-7t5gv\" (UID: \"5fc0d972-014e-4c07-b699-372362c53774\") " 
pod="openshift-marketplace/redhat-operators-7t5gv" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.393809 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4c2pf\" (UniqueName: \"kubernetes.io/projected/5fc0d972-014e-4c07-b699-372362c53774-kube-api-access-4c2pf\") pod \"redhat-operators-7t5gv\" (UID: \"5fc0d972-014e-4c07-b699-372362c53774\") " pod="openshift-marketplace/redhat-operators-7t5gv" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.404530 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nx6zm" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.495805 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fc0d972-014e-4c07-b699-372362c53774-catalog-content\") pod \"redhat-operators-7t5gv\" (UID: \"5fc0d972-014e-4c07-b699-372362c53774\") " pod="openshift-marketplace/redhat-operators-7t5gv" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.496338 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fc0d972-014e-4c07-b699-372362c53774-utilities\") pod \"redhat-operators-7t5gv\" (UID: \"5fc0d972-014e-4c07-b699-372362c53774\") " pod="openshift-marketplace/redhat-operators-7t5gv" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.496372 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4c2pf\" (UniqueName: \"kubernetes.io/projected/5fc0d972-014e-4c07-b699-372362c53774-kube-api-access-4c2pf\") pod \"redhat-operators-7t5gv\" (UID: \"5fc0d972-014e-4c07-b699-372362c53774\") " pod="openshift-marketplace/redhat-operators-7t5gv" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.496682 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fc0d972-014e-4c07-b699-372362c53774-catalog-content\") pod \"redhat-operators-7t5gv\" (UID: \"5fc0d972-014e-4c07-b699-372362c53774\") " pod="openshift-marketplace/redhat-operators-7t5gv" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.497034 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fc0d972-014e-4c07-b699-372362c53774-utilities\") pod \"redhat-operators-7t5gv\" (UID: \"5fc0d972-014e-4c07-b699-372362c53774\") " pod="openshift-marketplace/redhat-operators-7t5gv" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.532199 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4c2pf\" (UniqueName: \"kubernetes.io/projected/5fc0d972-014e-4c07-b699-372362c53774-kube-api-access-4c2pf\") pod \"redhat-operators-7t5gv\" (UID: \"5fc0d972-014e-4c07-b699-372362c53774\") " pod="openshift-marketplace/redhat-operators-7t5gv" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.568330 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7t5gv" Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.844357 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nx6zm"] Jan 23 08:26:23 crc kubenswrapper[4711]: I0123 08:26:23.853057 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7t5gv"] Jan 23 08:26:24 crc kubenswrapper[4711]: I0123 08:26:24.062606 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-hkpkn" Jan 23 08:26:24 crc kubenswrapper[4711]: I0123 08:26:24.121496 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-58wxx"] Jan 23 08:26:24 crc kubenswrapper[4711]: I0123 08:26:24.785442 4711 generic.go:334] "Generic (PLEG): container finished" podID="5fc0d972-014e-4c07-b699-372362c53774" containerID="8ff1700f405067970288336725347cfb6dd235ad319fbfc66cfc79fdf129acaa" exitCode=0 Jan 23 08:26:24 crc kubenswrapper[4711]: I0123 08:26:24.785498 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7t5gv" event={"ID":"5fc0d972-014e-4c07-b699-372362c53774","Type":"ContainerDied","Data":"8ff1700f405067970288336725347cfb6dd235ad319fbfc66cfc79fdf129acaa"} Jan 23 08:26:24 crc kubenswrapper[4711]: I0123 08:26:24.785817 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7t5gv" event={"ID":"5fc0d972-014e-4c07-b699-372362c53774","Type":"ContainerStarted","Data":"ebbaf5f4b560492529e9386dc5c0aa889512ab9ad1295bb443a9a5ee8691b744"} Jan 23 08:26:24 crc kubenswrapper[4711]: I0123 08:26:24.787700 4711 generic.go:334] "Generic (PLEG): container finished" podID="f82925db-b9b1-4c44-8e2f-467607bd171c" containerID="ce2885ea5d911177646d1398588730429ecad12e0e99e46644fc44a5283eb676" exitCode=0 Jan 23 08:26:24 crc kubenswrapper[4711]: I0123 08:26:24.787738 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nx6zm" event={"ID":"f82925db-b9b1-4c44-8e2f-467607bd171c","Type":"ContainerDied","Data":"ce2885ea5d911177646d1398588730429ecad12e0e99e46644fc44a5283eb676"} Jan 23 08:26:24 crc kubenswrapper[4711]: I0123 08:26:24.787756 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nx6zm" event={"ID":"f82925db-b9b1-4c44-8e2f-467607bd171c","Type":"ContainerStarted","Data":"6cf41192109694644d3f2f58d39e2c3fc5dd13e7743be7a8926742d439242cd6"} Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.442244 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dq742"] Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.443483 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dq742" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.445812 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.463927 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dq742"] Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.534982 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xsl7\" (UniqueName: \"kubernetes.io/projected/1fa1164d-0e54-42bd-9fe1-88f3a02148b0-kube-api-access-6xsl7\") pod \"community-operators-dq742\" (UID: \"1fa1164d-0e54-42bd-9fe1-88f3a02148b0\") " pod="openshift-marketplace/community-operators-dq742" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.535053 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fa1164d-0e54-42bd-9fe1-88f3a02148b0-utilities\") pod \"community-operators-dq742\" (UID: \"1fa1164d-0e54-42bd-9fe1-88f3a02148b0\") " pod="openshift-marketplace/community-operators-dq742" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.535098 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fa1164d-0e54-42bd-9fe1-88f3a02148b0-catalog-content\") pod \"community-operators-dq742\" (UID: \"1fa1164d-0e54-42bd-9fe1-88f3a02148b0\") " pod="openshift-marketplace/community-operators-dq742" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.636553 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xsl7\" (UniqueName: \"kubernetes.io/projected/1fa1164d-0e54-42bd-9fe1-88f3a02148b0-kube-api-access-6xsl7\") pod \"community-operators-dq742\" (UID: \"1fa1164d-0e54-42bd-9fe1-88f3a02148b0\") " pod="openshift-marketplace/community-operators-dq742" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.636625 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fa1164d-0e54-42bd-9fe1-88f3a02148b0-utilities\") pod \"community-operators-dq742\" (UID: \"1fa1164d-0e54-42bd-9fe1-88f3a02148b0\") " pod="openshift-marketplace/community-operators-dq742" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.636669 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fa1164d-0e54-42bd-9fe1-88f3a02148b0-catalog-content\") pod \"community-operators-dq742\" (UID: \"1fa1164d-0e54-42bd-9fe1-88f3a02148b0\") " pod="openshift-marketplace/community-operators-dq742" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.637099 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fa1164d-0e54-42bd-9fe1-88f3a02148b0-utilities\") pod \"community-operators-dq742\" (UID: \"1fa1164d-0e54-42bd-9fe1-88f3a02148b0\") " pod="openshift-marketplace/community-operators-dq742" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.637238 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fa1164d-0e54-42bd-9fe1-88f3a02148b0-catalog-content\") pod \"community-operators-dq742\" (UID: 
\"1fa1164d-0e54-42bd-9fe1-88f3a02148b0\") " pod="openshift-marketplace/community-operators-dq742" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.642886 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zjxjv"] Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.644278 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zjxjv" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.646203 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.656311 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zjxjv"] Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.664181 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xsl7\" (UniqueName: \"kubernetes.io/projected/1fa1164d-0e54-42bd-9fe1-88f3a02148b0-kube-api-access-6xsl7\") pod \"community-operators-dq742\" (UID: \"1fa1164d-0e54-42bd-9fe1-88f3a02148b0\") " pod="openshift-marketplace/community-operators-dq742" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.738220 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bbf14bc-e2e2-4708-882d-e3234f82409b-utilities\") pod \"certified-operators-zjxjv\" (UID: \"9bbf14bc-e2e2-4708-882d-e3234f82409b\") " pod="openshift-marketplace/certified-operators-zjxjv" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.738308 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcxnf\" (UniqueName: \"kubernetes.io/projected/9bbf14bc-e2e2-4708-882d-e3234f82409b-kube-api-access-rcxnf\") pod \"certified-operators-zjxjv\" (UID: \"9bbf14bc-e2e2-4708-882d-e3234f82409b\") " pod="openshift-marketplace/certified-operators-zjxjv" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.738361 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bbf14bc-e2e2-4708-882d-e3234f82409b-catalog-content\") pod \"certified-operators-zjxjv\" (UID: \"9bbf14bc-e2e2-4708-882d-e3234f82409b\") " pod="openshift-marketplace/certified-operators-zjxjv" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.759572 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dq742" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.840229 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bbf14bc-e2e2-4708-882d-e3234f82409b-utilities\") pod \"certified-operators-zjxjv\" (UID: \"9bbf14bc-e2e2-4708-882d-e3234f82409b\") " pod="openshift-marketplace/certified-operators-zjxjv" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.840607 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcxnf\" (UniqueName: \"kubernetes.io/projected/9bbf14bc-e2e2-4708-882d-e3234f82409b-kube-api-access-rcxnf\") pod \"certified-operators-zjxjv\" (UID: \"9bbf14bc-e2e2-4708-882d-e3234f82409b\") " pod="openshift-marketplace/certified-operators-zjxjv" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.840685 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bbf14bc-e2e2-4708-882d-e3234f82409b-catalog-content\") pod \"certified-operators-zjxjv\" (UID: \"9bbf14bc-e2e2-4708-882d-e3234f82409b\") " pod="openshift-marketplace/certified-operators-zjxjv" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.840787 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bbf14bc-e2e2-4708-882d-e3234f82409b-utilities\") pod \"certified-operators-zjxjv\" (UID: \"9bbf14bc-e2e2-4708-882d-e3234f82409b\") " pod="openshift-marketplace/certified-operators-zjxjv" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.841146 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bbf14bc-e2e2-4708-882d-e3234f82409b-catalog-content\") pod \"certified-operators-zjxjv\" (UID: \"9bbf14bc-e2e2-4708-882d-e3234f82409b\") " pod="openshift-marketplace/certified-operators-zjxjv" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.857049 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcxnf\" (UniqueName: \"kubernetes.io/projected/9bbf14bc-e2e2-4708-882d-e3234f82409b-kube-api-access-rcxnf\") pod \"certified-operators-zjxjv\" (UID: \"9bbf14bc-e2e2-4708-882d-e3234f82409b\") " pod="openshift-marketplace/certified-operators-zjxjv" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.961351 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zjxjv" Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.995180 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:26:25 crc kubenswrapper[4711]: I0123 08:26:25.995258 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:26:26 crc kubenswrapper[4711]: I0123 08:26:26.051717 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dq742"] Jan 23 08:26:26 crc kubenswrapper[4711]: I0123 08:26:26.407983 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zjxjv"] Jan 23 08:26:26 crc kubenswrapper[4711]: W0123 08:26:26.411342 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9bbf14bc_e2e2_4708_882d_e3234f82409b.slice/crio-9b2e70858ee8c4d9b26216d2583dfdf542d086dc58a696649a443df2ead8c9da WatchSource:0}: Error finding container 9b2e70858ee8c4d9b26216d2583dfdf542d086dc58a696649a443df2ead8c9da: Status 404 returned error can't find the container with id 9b2e70858ee8c4d9b26216d2583dfdf542d086dc58a696649a443df2ead8c9da Jan 23 08:26:26 crc kubenswrapper[4711]: I0123 08:26:26.812399 4711 generic.go:334] "Generic (PLEG): container finished" podID="f82925db-b9b1-4c44-8e2f-467607bd171c" containerID="0b6cac91dd662ce17b7fb631c909f30eac57cf72015c73f5563146f324160ee4" exitCode=0 Jan 23 08:26:26 crc kubenswrapper[4711]: I0123 08:26:26.812467 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nx6zm" event={"ID":"f82925db-b9b1-4c44-8e2f-467607bd171c","Type":"ContainerDied","Data":"0b6cac91dd662ce17b7fb631c909f30eac57cf72015c73f5563146f324160ee4"} Jan 23 08:26:26 crc kubenswrapper[4711]: I0123 08:26:26.814586 4711 generic.go:334] "Generic (PLEG): container finished" podID="1fa1164d-0e54-42bd-9fe1-88f3a02148b0" containerID="0e58c62c6a7c480a0b4251d92270921ded2d4ac730de2c9ef74763021544f479" exitCode=0 Jan 23 08:26:26 crc kubenswrapper[4711]: I0123 08:26:26.814679 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dq742" event={"ID":"1fa1164d-0e54-42bd-9fe1-88f3a02148b0","Type":"ContainerDied","Data":"0e58c62c6a7c480a0b4251d92270921ded2d4ac730de2c9ef74763021544f479"} Jan 23 08:26:26 crc kubenswrapper[4711]: I0123 08:26:26.814725 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dq742" event={"ID":"1fa1164d-0e54-42bd-9fe1-88f3a02148b0","Type":"ContainerStarted","Data":"2374573b7a45d7fb0678bd0e8c6bd00c3dd4ee72961d30f3a65fb5526e3ffe52"} Jan 23 08:26:26 crc kubenswrapper[4711]: I0123 08:26:26.823066 4711 generic.go:334] "Generic (PLEG): container finished" podID="5fc0d972-014e-4c07-b699-372362c53774" containerID="ce5395d7a20e8b52025faa14ce6fc18a5b385a8761ecde6b6462d7d81f2490bb" exitCode=0 Jan 23 08:26:26 crc kubenswrapper[4711]: I0123 08:26:26.823171 4711 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-marketplace/redhat-operators-7t5gv" event={"ID":"5fc0d972-014e-4c07-b699-372362c53774","Type":"ContainerDied","Data":"ce5395d7a20e8b52025faa14ce6fc18a5b385a8761ecde6b6462d7d81f2490bb"} Jan 23 08:26:26 crc kubenswrapper[4711]: I0123 08:26:26.828876 4711 generic.go:334] "Generic (PLEG): container finished" podID="9bbf14bc-e2e2-4708-882d-e3234f82409b" containerID="a3174ef1830b8e2b24715cc1bfa09e2a075c7630bd64f79ff4ac3181478c05da" exitCode=0 Jan 23 08:26:26 crc kubenswrapper[4711]: I0123 08:26:26.828919 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zjxjv" event={"ID":"9bbf14bc-e2e2-4708-882d-e3234f82409b","Type":"ContainerDied","Data":"a3174ef1830b8e2b24715cc1bfa09e2a075c7630bd64f79ff4ac3181478c05da"} Jan 23 08:26:26 crc kubenswrapper[4711]: I0123 08:26:26.828951 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zjxjv" event={"ID":"9bbf14bc-e2e2-4708-882d-e3234f82409b","Type":"ContainerStarted","Data":"9b2e70858ee8c4d9b26216d2583dfdf542d086dc58a696649a443df2ead8c9da"} Jan 23 08:26:28 crc kubenswrapper[4711]: I0123 08:26:28.840938 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7t5gv" event={"ID":"5fc0d972-014e-4c07-b699-372362c53774","Type":"ContainerStarted","Data":"eb7a81e7fc63c20a6d05edccb5d35d85e0403765dcbac66e599f27eb759125bc"} Jan 23 08:26:28 crc kubenswrapper[4711]: I0123 08:26:28.843034 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nx6zm" event={"ID":"f82925db-b9b1-4c44-8e2f-467607bd171c","Type":"ContainerStarted","Data":"4d607639f17870d4b970d13bbf1fdbcbf8361570d50b4d4891f349f76984f27b"} Jan 23 08:26:28 crc kubenswrapper[4711]: I0123 08:26:28.844976 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dq742" event={"ID":"1fa1164d-0e54-42bd-9fe1-88f3a02148b0","Type":"ContainerStarted","Data":"f821b49d04af95c48123a5a6fb452b99400007e8dfb957feb82641f39978b174"} Jan 23 08:26:28 crc kubenswrapper[4711]: I0123 08:26:28.878395 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7t5gv" podStartSLOduration=2.153101453 podStartE2EDuration="5.878364523s" podCreationTimestamp="2026-01-23 08:26:23 +0000 UTC" firstStartedPulling="2026-01-23 08:26:24.787363002 +0000 UTC m=+370.360319370" lastFinishedPulling="2026-01-23 08:26:28.512626072 +0000 UTC m=+374.085582440" observedRunningTime="2026-01-23 08:26:28.861235063 +0000 UTC m=+374.434191431" watchObservedRunningTime="2026-01-23 08:26:28.878364523 +0000 UTC m=+374.451320891" Jan 23 08:26:28 crc kubenswrapper[4711]: I0123 08:26:28.880640 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nx6zm" podStartSLOduration=2.788032071 podStartE2EDuration="5.880627496s" podCreationTimestamp="2026-01-23 08:26:23 +0000 UTC" firstStartedPulling="2026-01-23 08:26:24.789375349 +0000 UTC m=+370.362331717" lastFinishedPulling="2026-01-23 08:26:27.881970774 +0000 UTC m=+373.454927142" observedRunningTime="2026-01-23 08:26:28.878047455 +0000 UTC m=+374.451003843" watchObservedRunningTime="2026-01-23 08:26:28.880627496 +0000 UTC m=+374.453583864" Jan 23 08:26:29 crc kubenswrapper[4711]: E0123 08:26:29.218120 4711 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1fa1164d_0e54_42bd_9fe1_88f3a02148b0.slice/crio-conmon-f821b49d04af95c48123a5a6fb452b99400007e8dfb957feb82641f39978b174.scope\": RecentStats: unable to find data in memory cache]" Jan 23 08:26:29 crc kubenswrapper[4711]: I0123 08:26:29.852930 4711 generic.go:334] "Generic (PLEG): container finished" podID="1fa1164d-0e54-42bd-9fe1-88f3a02148b0" containerID="f821b49d04af95c48123a5a6fb452b99400007e8dfb957feb82641f39978b174" exitCode=0 Jan 23 08:26:29 crc kubenswrapper[4711]: I0123 08:26:29.853043 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dq742" event={"ID":"1fa1164d-0e54-42bd-9fe1-88f3a02148b0","Type":"ContainerDied","Data":"f821b49d04af95c48123a5a6fb452b99400007e8dfb957feb82641f39978b174"} Jan 23 08:26:31 crc kubenswrapper[4711]: I0123 08:26:31.866116 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zjxjv" event={"ID":"9bbf14bc-e2e2-4708-882d-e3234f82409b","Type":"ContainerStarted","Data":"f5bf40a72121decbf862da340d9f8568b6991cb3069e874468523217bf922829"} Jan 23 08:26:33 crc kubenswrapper[4711]: I0123 08:26:33.405718 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nx6zm" Jan 23 08:26:33 crc kubenswrapper[4711]: I0123 08:26:33.407468 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nx6zm" Jan 23 08:26:33 crc kubenswrapper[4711]: I0123 08:26:33.442896 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nx6zm" Jan 23 08:26:33 crc kubenswrapper[4711]: I0123 08:26:33.569806 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7t5gv" Jan 23 08:26:33 crc kubenswrapper[4711]: I0123 08:26:33.569864 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7t5gv" Jan 23 08:26:33 crc kubenswrapper[4711]: I0123 08:26:33.878321 4711 generic.go:334] "Generic (PLEG): container finished" podID="9bbf14bc-e2e2-4708-882d-e3234f82409b" containerID="f5bf40a72121decbf862da340d9f8568b6991cb3069e874468523217bf922829" exitCode=0 Jan 23 08:26:33 crc kubenswrapper[4711]: I0123 08:26:33.878367 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zjxjv" event={"ID":"9bbf14bc-e2e2-4708-882d-e3234f82409b","Type":"ContainerDied","Data":"f5bf40a72121decbf862da340d9f8568b6991cb3069e874468523217bf922829"} Jan 23 08:26:33 crc kubenswrapper[4711]: I0123 08:26:33.924730 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nx6zm" Jan 23 08:26:34 crc kubenswrapper[4711]: I0123 08:26:34.605716 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-7t5gv" podUID="5fc0d972-014e-4c07-b699-372362c53774" containerName="registry-server" probeResult="failure" output=< Jan 23 08:26:34 crc kubenswrapper[4711]: timeout: failed to connect service ":50051" within 1s Jan 23 08:26:34 crc kubenswrapper[4711]: > Jan 23 08:26:37 crc kubenswrapper[4711]: I0123 08:26:37.802895 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-dbf56b754-fwmr8"] Jan 23 08:26:37 crc kubenswrapper[4711]: I0123 08:26:37.803183 4711 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8" podUID="95e6cf0a-1b13-4b60-a526-84a9705274c0" containerName="controller-manager" containerID="cri-o://636a5eb1ea1aed0d4526047fac8b69163e9ba8a79cd51b69d06c561f7aabfd00" gracePeriod=30 Jan 23 08:26:39 crc kubenswrapper[4711]: I0123 08:26:39.464214 4711 patch_prober.go:28] interesting pod/controller-manager-dbf56b754-fwmr8 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.60:8443/healthz\": dial tcp 10.217.0.60:8443: connect: connection refused" start-of-body= Jan 23 08:26:39 crc kubenswrapper[4711]: I0123 08:26:39.464693 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8" podUID="95e6cf0a-1b13-4b60-a526-84a9705274c0" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.60:8443/healthz\": dial tcp 10.217.0.60:8443: connect: connection refused" Jan 23 08:26:39 crc kubenswrapper[4711]: I0123 08:26:39.912445 4711 generic.go:334] "Generic (PLEG): container finished" podID="95e6cf0a-1b13-4b60-a526-84a9705274c0" containerID="636a5eb1ea1aed0d4526047fac8b69163e9ba8a79cd51b69d06c561f7aabfd00" exitCode=0 Jan 23 08:26:39 crc kubenswrapper[4711]: I0123 08:26:39.913006 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8" event={"ID":"95e6cf0a-1b13-4b60-a526-84a9705274c0","Type":"ContainerDied","Data":"636a5eb1ea1aed0d4526047fac8b69163e9ba8a79cd51b69d06c561f7aabfd00"} Jan 23 08:26:39 crc kubenswrapper[4711]: I0123 08:26:39.914997 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dq742" event={"ID":"1fa1164d-0e54-42bd-9fe1-88f3a02148b0","Type":"ContainerStarted","Data":"d51d70d26c5376d17e8ccccc68975ccdd29fb5a459b388531ca7d4fa5f0f2fdd"} Jan 23 08:26:39 crc kubenswrapper[4711]: I0123 08:26:39.931643 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dq742" podStartSLOduration=2.283933268 podStartE2EDuration="14.931627578s" podCreationTimestamp="2026-01-23 08:26:25 +0000 UTC" firstStartedPulling="2026-01-23 08:26:26.816193913 +0000 UTC m=+372.389150281" lastFinishedPulling="2026-01-23 08:26:39.463888223 +0000 UTC m=+385.036844591" observedRunningTime="2026-01-23 08:26:39.929220551 +0000 UTC m=+385.502176939" watchObservedRunningTime="2026-01-23 08:26:39.931627578 +0000 UTC m=+385.504583946" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.443391 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.472670 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-588d657775-mng2q"] Jan 23 08:26:40 crc kubenswrapper[4711]: E0123 08:26:40.472937 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95e6cf0a-1b13-4b60-a526-84a9705274c0" containerName="controller-manager" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.472954 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="95e6cf0a-1b13-4b60-a526-84a9705274c0" containerName="controller-manager" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.473105 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="95e6cf0a-1b13-4b60-a526-84a9705274c0" containerName="controller-manager" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.473618 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-588d657775-mng2q" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.530219 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-588d657775-mng2q"] Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.541865 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjmxc\" (UniqueName: \"kubernetes.io/projected/95e6cf0a-1b13-4b60-a526-84a9705274c0-kube-api-access-sjmxc\") pod \"95e6cf0a-1b13-4b60-a526-84a9705274c0\" (UID: \"95e6cf0a-1b13-4b60-a526-84a9705274c0\") " Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.541999 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/95e6cf0a-1b13-4b60-a526-84a9705274c0-serving-cert\") pod \"95e6cf0a-1b13-4b60-a526-84a9705274c0\" (UID: \"95e6cf0a-1b13-4b60-a526-84a9705274c0\") " Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.542030 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/95e6cf0a-1b13-4b60-a526-84a9705274c0-proxy-ca-bundles\") pod \"95e6cf0a-1b13-4b60-a526-84a9705274c0\" (UID: \"95e6cf0a-1b13-4b60-a526-84a9705274c0\") " Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.542071 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/95e6cf0a-1b13-4b60-a526-84a9705274c0-client-ca\") pod \"95e6cf0a-1b13-4b60-a526-84a9705274c0\" (UID: \"95e6cf0a-1b13-4b60-a526-84a9705274c0\") " Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.542092 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95e6cf0a-1b13-4b60-a526-84a9705274c0-config\") pod \"95e6cf0a-1b13-4b60-a526-84a9705274c0\" (UID: \"95e6cf0a-1b13-4b60-a526-84a9705274c0\") " Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.542215 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pbjv\" (UniqueName: \"kubernetes.io/projected/801336b2-7a0c-454e-9a96-d9e6d4e22605-kube-api-access-6pbjv\") pod \"controller-manager-588d657775-mng2q\" (UID: \"801336b2-7a0c-454e-9a96-d9e6d4e22605\") " pod="openshift-controller-manager/controller-manager-588d657775-mng2q" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 
08:26:40.542237 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/801336b2-7a0c-454e-9a96-d9e6d4e22605-config\") pod \"controller-manager-588d657775-mng2q\" (UID: \"801336b2-7a0c-454e-9a96-d9e6d4e22605\") " pod="openshift-controller-manager/controller-manager-588d657775-mng2q" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.542289 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/801336b2-7a0c-454e-9a96-d9e6d4e22605-client-ca\") pod \"controller-manager-588d657775-mng2q\" (UID: \"801336b2-7a0c-454e-9a96-d9e6d4e22605\") " pod="openshift-controller-manager/controller-manager-588d657775-mng2q" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.542312 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/801336b2-7a0c-454e-9a96-d9e6d4e22605-serving-cert\") pod \"controller-manager-588d657775-mng2q\" (UID: \"801336b2-7a0c-454e-9a96-d9e6d4e22605\") " pod="openshift-controller-manager/controller-manager-588d657775-mng2q" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.542336 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/801336b2-7a0c-454e-9a96-d9e6d4e22605-proxy-ca-bundles\") pod \"controller-manager-588d657775-mng2q\" (UID: \"801336b2-7a0c-454e-9a96-d9e6d4e22605\") " pod="openshift-controller-manager/controller-manager-588d657775-mng2q" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.543729 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95e6cf0a-1b13-4b60-a526-84a9705274c0-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "95e6cf0a-1b13-4b60-a526-84a9705274c0" (UID: "95e6cf0a-1b13-4b60-a526-84a9705274c0"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.543839 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95e6cf0a-1b13-4b60-a526-84a9705274c0-client-ca" (OuterVolumeSpecName: "client-ca") pod "95e6cf0a-1b13-4b60-a526-84a9705274c0" (UID: "95e6cf0a-1b13-4b60-a526-84a9705274c0"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.544164 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95e6cf0a-1b13-4b60-a526-84a9705274c0-config" (OuterVolumeSpecName: "config") pod "95e6cf0a-1b13-4b60-a526-84a9705274c0" (UID: "95e6cf0a-1b13-4b60-a526-84a9705274c0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.551722 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95e6cf0a-1b13-4b60-a526-84a9705274c0-kube-api-access-sjmxc" (OuterVolumeSpecName: "kube-api-access-sjmxc") pod "95e6cf0a-1b13-4b60-a526-84a9705274c0" (UID: "95e6cf0a-1b13-4b60-a526-84a9705274c0"). InnerVolumeSpecName "kube-api-access-sjmxc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.560340 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95e6cf0a-1b13-4b60-a526-84a9705274c0-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "95e6cf0a-1b13-4b60-a526-84a9705274c0" (UID: "95e6cf0a-1b13-4b60-a526-84a9705274c0"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.643595 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/801336b2-7a0c-454e-9a96-d9e6d4e22605-config\") pod \"controller-manager-588d657775-mng2q\" (UID: \"801336b2-7a0c-454e-9a96-d9e6d4e22605\") " pod="openshift-controller-manager/controller-manager-588d657775-mng2q" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.645117 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/801336b2-7a0c-454e-9a96-d9e6d4e22605-config\") pod \"controller-manager-588d657775-mng2q\" (UID: \"801336b2-7a0c-454e-9a96-d9e6d4e22605\") " pod="openshift-controller-manager/controller-manager-588d657775-mng2q" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.646032 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/801336b2-7a0c-454e-9a96-d9e6d4e22605-client-ca\") pod \"controller-manager-588d657775-mng2q\" (UID: \"801336b2-7a0c-454e-9a96-d9e6d4e22605\") " pod="openshift-controller-manager/controller-manager-588d657775-mng2q" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.646773 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/801336b2-7a0c-454e-9a96-d9e6d4e22605-serving-cert\") pod \"controller-manager-588d657775-mng2q\" (UID: \"801336b2-7a0c-454e-9a96-d9e6d4e22605\") " pod="openshift-controller-manager/controller-manager-588d657775-mng2q" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.646886 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/801336b2-7a0c-454e-9a96-d9e6d4e22605-proxy-ca-bundles\") pod \"controller-manager-588d657775-mng2q\" (UID: \"801336b2-7a0c-454e-9a96-d9e6d4e22605\") " pod="openshift-controller-manager/controller-manager-588d657775-mng2q" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.646790 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/801336b2-7a0c-454e-9a96-d9e6d4e22605-client-ca\") pod \"controller-manager-588d657775-mng2q\" (UID: \"801336b2-7a0c-454e-9a96-d9e6d4e22605\") " pod="openshift-controller-manager/controller-manager-588d657775-mng2q" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.646994 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pbjv\" (UniqueName: \"kubernetes.io/projected/801336b2-7a0c-454e-9a96-d9e6d4e22605-kube-api-access-6pbjv\") pod \"controller-manager-588d657775-mng2q\" (UID: \"801336b2-7a0c-454e-9a96-d9e6d4e22605\") " pod="openshift-controller-manager/controller-manager-588d657775-mng2q" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.647063 4711 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/95e6cf0a-1b13-4b60-a526-84a9705274c0-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.647075 4711 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/95e6cf0a-1b13-4b60-a526-84a9705274c0-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.647088 4711 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/95e6cf0a-1b13-4b60-a526-84a9705274c0-client-ca\") on node \"crc\" DevicePath \"\"" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.647098 4711 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95e6cf0a-1b13-4b60-a526-84a9705274c0-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.647108 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjmxc\" (UniqueName: \"kubernetes.io/projected/95e6cf0a-1b13-4b60-a526-84a9705274c0-kube-api-access-sjmxc\") on node \"crc\" DevicePath \"\"" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.647840 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/801336b2-7a0c-454e-9a96-d9e6d4e22605-proxy-ca-bundles\") pod \"controller-manager-588d657775-mng2q\" (UID: \"801336b2-7a0c-454e-9a96-d9e6d4e22605\") " pod="openshift-controller-manager/controller-manager-588d657775-mng2q" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.649926 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/801336b2-7a0c-454e-9a96-d9e6d4e22605-serving-cert\") pod \"controller-manager-588d657775-mng2q\" (UID: \"801336b2-7a0c-454e-9a96-d9e6d4e22605\") " pod="openshift-controller-manager/controller-manager-588d657775-mng2q" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.664083 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pbjv\" (UniqueName: \"kubernetes.io/projected/801336b2-7a0c-454e-9a96-d9e6d4e22605-kube-api-access-6pbjv\") pod \"controller-manager-588d657775-mng2q\" (UID: \"801336b2-7a0c-454e-9a96-d9e6d4e22605\") " pod="openshift-controller-manager/controller-manager-588d657775-mng2q" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.789637 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-588d657775-mng2q" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.928424 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zjxjv" event={"ID":"9bbf14bc-e2e2-4708-882d-e3234f82409b","Type":"ContainerStarted","Data":"0497d5e2cc24187cd363403e86eb2a1ed70a7b20ad669b416d95cba6d545fb4d"} Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.931622 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8" event={"ID":"95e6cf0a-1b13-4b60-a526-84a9705274c0","Type":"ContainerDied","Data":"b0773b06258d70fc12f16adae18f63ab0569d5a8417096954d54e57f14d3028b"} Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.931675 4711 scope.go:117] "RemoveContainer" containerID="636a5eb1ea1aed0d4526047fac8b69163e9ba8a79cd51b69d06c561f7aabfd00" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.931937 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-dbf56b754-fwmr8" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.971998 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zjxjv" podStartSLOduration=2.5954164889999998 podStartE2EDuration="15.97197107s" podCreationTimestamp="2026-01-23 08:26:25 +0000 UTC" firstStartedPulling="2026-01-23 08:26:26.830729731 +0000 UTC m=+372.403686099" lastFinishedPulling="2026-01-23 08:26:40.207284302 +0000 UTC m=+385.780240680" observedRunningTime="2026-01-23 08:26:40.944778909 +0000 UTC m=+386.517735277" watchObservedRunningTime="2026-01-23 08:26:40.97197107 +0000 UTC m=+386.544927438" Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.972380 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-dbf56b754-fwmr8"] Jan 23 08:26:40 crc kubenswrapper[4711]: I0123 08:26:40.979266 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-dbf56b754-fwmr8"] Jan 23 08:26:41 crc kubenswrapper[4711]: I0123 08:26:41.174565 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-588d657775-mng2q"] Jan 23 08:26:41 crc kubenswrapper[4711]: W0123 08:26:41.181466 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod801336b2_7a0c_454e_9a96_d9e6d4e22605.slice/crio-41c4f441a73500f7e57b98d4c90ac83a2d506e35d48b064952158d1369b45c93 WatchSource:0}: Error finding container 41c4f441a73500f7e57b98d4c90ac83a2d506e35d48b064952158d1369b45c93: Status 404 returned error can't find the container with id 41c4f441a73500f7e57b98d4c90ac83a2d506e35d48b064952158d1369b45c93 Jan 23 08:26:41 crc kubenswrapper[4711]: I0123 08:26:41.482992 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95e6cf0a-1b13-4b60-a526-84a9705274c0" path="/var/lib/kubelet/pods/95e6cf0a-1b13-4b60-a526-84a9705274c0/volumes" Jan 23 08:26:41 crc kubenswrapper[4711]: I0123 08:26:41.938957 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-588d657775-mng2q" event={"ID":"801336b2-7a0c-454e-9a96-d9e6d4e22605","Type":"ContainerStarted","Data":"f00618a47894605c74684d9b39391ba967f24bd217b1ee1e79b65c7742728818"} Jan 23 08:26:41 crc kubenswrapper[4711]: I0123 08:26:41.939492 4711 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-588d657775-mng2q" Jan 23 08:26:41 crc kubenswrapper[4711]: I0123 08:26:41.940223 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-588d657775-mng2q" event={"ID":"801336b2-7a0c-454e-9a96-d9e6d4e22605","Type":"ContainerStarted","Data":"41c4f441a73500f7e57b98d4c90ac83a2d506e35d48b064952158d1369b45c93"} Jan 23 08:26:41 crc kubenswrapper[4711]: I0123 08:26:41.946162 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-588d657775-mng2q" Jan 23 08:26:41 crc kubenswrapper[4711]: I0123 08:26:41.970216 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-588d657775-mng2q" podStartSLOduration=4.970197653 podStartE2EDuration="4.970197653s" podCreationTimestamp="2026-01-23 08:26:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:26:41.966024993 +0000 UTC m=+387.538981361" watchObservedRunningTime="2026-01-23 08:26:41.970197653 +0000 UTC m=+387.543154021" Jan 23 08:26:43 crc kubenswrapper[4711]: I0123 08:26:43.620382 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7t5gv" Jan 23 08:26:43 crc kubenswrapper[4711]: I0123 08:26:43.662053 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7t5gv" Jan 23 08:26:45 crc kubenswrapper[4711]: I0123 08:26:45.760375 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dq742" Jan 23 08:26:45 crc kubenswrapper[4711]: I0123 08:26:45.760984 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dq742" Jan 23 08:26:45 crc kubenswrapper[4711]: I0123 08:26:45.808704 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dq742" Jan 23 08:26:45 crc kubenswrapper[4711]: I0123 08:26:45.962046 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zjxjv" Jan 23 08:26:45 crc kubenswrapper[4711]: I0123 08:26:45.962131 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zjxjv" Jan 23 08:26:46 crc kubenswrapper[4711]: I0123 08:26:46.017976 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zjxjv" Jan 23 08:26:46 crc kubenswrapper[4711]: I0123 08:26:46.030691 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dq742" Jan 23 08:26:46 crc kubenswrapper[4711]: I0123 08:26:46.062417 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zjxjv" Jan 23 08:26:49 crc kubenswrapper[4711]: I0123 08:26:49.202309 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" podUID="bb9eeabb-23bb-45db-bcbb-aae7c165f260" containerName="registry" containerID="cri-o://19992feb250e2ad43b750f498864bcf6e5986e228b53b78613c6cf1ec344f5b1" gracePeriod=30 Jan 
23 08:26:49 crc kubenswrapper[4711]: I0123 08:26:49.783726 4711 patch_prober.go:28] interesting pod/image-registry-697d97f7c8-58wxx container/registry namespace/openshift-image-registry: Readiness probe status=failure output="Get \"https://10.217.0.22:5000/healthz\": dial tcp 10.217.0.22:5000: connect: connection refused" start-of-body= Jan 23 08:26:49 crc kubenswrapper[4711]: I0123 08:26:49.783788 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" podUID="bb9eeabb-23bb-45db-bcbb-aae7c165f260" containerName="registry" probeResult="failure" output="Get \"https://10.217.0.22:5000/healthz\": dial tcp 10.217.0.22:5000: connect: connection refused" Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:53.025842 4711 generic.go:334] "Generic (PLEG): container finished" podID="bb9eeabb-23bb-45db-bcbb-aae7c165f260" containerID="19992feb250e2ad43b750f498864bcf6e5986e228b53b78613c6cf1ec344f5b1" exitCode=0 Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:53.025926 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" event={"ID":"bb9eeabb-23bb-45db-bcbb-aae7c165f260","Type":"ContainerDied","Data":"19992feb250e2ad43b750f498864bcf6e5986e228b53b78613c6cf1ec344f5b1"} Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:53.813427 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:53.945110 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bb9eeabb-23bb-45db-bcbb-aae7c165f260-installation-pull-secrets\") pod \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:53.945173 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bb9eeabb-23bb-45db-bcbb-aae7c165f260-trusted-ca\") pod \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:53.945228 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xm8sg\" (UniqueName: \"kubernetes.io/projected/bb9eeabb-23bb-45db-bcbb-aae7c165f260-kube-api-access-xm8sg\") pod \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:53.945251 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bb9eeabb-23bb-45db-bcbb-aae7c165f260-registry-tls\") pod \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:53.945281 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bb9eeabb-23bb-45db-bcbb-aae7c165f260-registry-certificates\") pod \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:53.945313 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: 
\"kubernetes.io/empty-dir/bb9eeabb-23bb-45db-bcbb-aae7c165f260-ca-trust-extracted\") pod \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:53.945471 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:53.945549 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bb9eeabb-23bb-45db-bcbb-aae7c165f260-bound-sa-token\") pod \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\" (UID: \"bb9eeabb-23bb-45db-bcbb-aae7c165f260\") " Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:53.947956 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb9eeabb-23bb-45db-bcbb-aae7c165f260-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bb9eeabb-23bb-45db-bcbb-aae7c165f260" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:53.947976 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb9eeabb-23bb-45db-bcbb-aae7c165f260-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "bb9eeabb-23bb-45db-bcbb-aae7c165f260" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:53.951667 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb9eeabb-23bb-45db-bcbb-aae7c165f260-kube-api-access-xm8sg" (OuterVolumeSpecName: "kube-api-access-xm8sg") pod "bb9eeabb-23bb-45db-bcbb-aae7c165f260" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260"). InnerVolumeSpecName "kube-api-access-xm8sg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:53.951948 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb9eeabb-23bb-45db-bcbb-aae7c165f260-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "bb9eeabb-23bb-45db-bcbb-aae7c165f260" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:53.954318 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb9eeabb-23bb-45db-bcbb-aae7c165f260-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "bb9eeabb-23bb-45db-bcbb-aae7c165f260" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:53.964582 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb9eeabb-23bb-45db-bcbb-aae7c165f260-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "bb9eeabb-23bb-45db-bcbb-aae7c165f260" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260"). 
InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:53.965236 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb9eeabb-23bb-45db-bcbb-aae7c165f260-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bb9eeabb-23bb-45db-bcbb-aae7c165f260" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:53.968059 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "bb9eeabb-23bb-45db-bcbb-aae7c165f260" (UID: "bb9eeabb-23bb-45db-bcbb-aae7c165f260"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:54.036180 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" event={"ID":"bb9eeabb-23bb-45db-bcbb-aae7c165f260","Type":"ContainerDied","Data":"6d0ece91f10eab8a0f149d06e7f0e4de29b19f78af29354e7b9a8e207c6780d7"} Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:54.036232 4711 scope.go:117] "RemoveContainer" containerID="19992feb250e2ad43b750f498864bcf6e5986e228b53b78613c6cf1ec344f5b1" Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:54.036256 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-58wxx" Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:54.047008 4711 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bb9eeabb-23bb-45db-bcbb-aae7c165f260-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:54.047034 4711 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bb9eeabb-23bb-45db-bcbb-aae7c165f260-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:54.047047 4711 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bb9eeabb-23bb-45db-bcbb-aae7c165f260-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:54.047088 4711 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bb9eeabb-23bb-45db-bcbb-aae7c165f260-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:54.047101 4711 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bb9eeabb-23bb-45db-bcbb-aae7c165f260-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:54.047112 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xm8sg\" (UniqueName: \"kubernetes.io/projected/bb9eeabb-23bb-45db-bcbb-aae7c165f260-kube-api-access-xm8sg\") on node \"crc\" DevicePath \"\"" Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:54.047123 4711 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/bb9eeabb-23bb-45db-bcbb-aae7c165f260-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:54.073861 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-58wxx"] Jan 23 08:26:54 crc kubenswrapper[4711]: I0123 08:26:54.076762 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-58wxx"] Jan 23 08:26:55 crc kubenswrapper[4711]: I0123 08:26:55.482015 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb9eeabb-23bb-45db-bcbb-aae7c165f260" path="/var/lib/kubelet/pods/bb9eeabb-23bb-45db-bcbb-aae7c165f260/volumes" Jan 23 08:26:55 crc kubenswrapper[4711]: I0123 08:26:55.993594 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:26:55 crc kubenswrapper[4711]: I0123 08:26:55.994173 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:27:25 crc kubenswrapper[4711]: I0123 08:27:25.994232 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:27:25 crc kubenswrapper[4711]: I0123 08:27:25.995061 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:27:25 crc kubenswrapper[4711]: I0123 08:27:25.995142 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:27:25 crc kubenswrapper[4711]: I0123 08:27:25.996165 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b064804566a711ebf557e11734763f83796197487bf136fb43efc8f895f896a6"} pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 23 08:27:25 crc kubenswrapper[4711]: I0123 08:27:25.996274 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" containerID="cri-o://b064804566a711ebf557e11734763f83796197487bf136fb43efc8f895f896a6" gracePeriod=600 Jan 23 08:27:27 crc kubenswrapper[4711]: I0123 08:27:27.245838 4711 generic.go:334] "Generic (PLEG): container finished" podID="3846d4e0-cfda-4e0b-8747-85267de12736" containerID="b064804566a711ebf557e11734763f83796197487bf136fb43efc8f895f896a6" exitCode=0 Jan 23 08:27:27 crc kubenswrapper[4711]: 
I0123 08:27:27.245910 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerDied","Data":"b064804566a711ebf557e11734763f83796197487bf136fb43efc8f895f896a6"} Jan 23 08:27:27 crc kubenswrapper[4711]: I0123 08:27:27.246415 4711 scope.go:117] "RemoveContainer" containerID="4ea637b539fe1f8c2d487f52c7e076322a3d5202fffb5c915f30267c5a7d3eb5" Jan 23 08:27:28 crc kubenswrapper[4711]: I0123 08:27:28.255423 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerStarted","Data":"dcb49a247b6f118496bffafa6bebed1af2fd5b9f478c43e4444025730a1bbd84"} Jan 23 08:29:15 crc kubenswrapper[4711]: I0123 08:29:15.518381 4711 scope.go:117] "RemoveContainer" containerID="a07f2a6a656138a829c8c6c9a6eecc2ae3152f76152e02220387a706c4ce4b43" Jan 23 08:29:15 crc kubenswrapper[4711]: I0123 08:29:15.547719 4711 scope.go:117] "RemoveContainer" containerID="869d10e7d3b796c75b6cef6f84dced38da1c4a28ab3f7e158a18ca6d17d4b42c" Jan 23 08:29:15 crc kubenswrapper[4711]: I0123 08:29:15.568829 4711 scope.go:117] "RemoveContainer" containerID="c14b890af9ee8b597f6605dbdd6e2cd3337f8cbf9abb549611b3acc882a8d219" Jan 23 08:29:15 crc kubenswrapper[4711]: I0123 08:29:15.585304 4711 scope.go:117] "RemoveContainer" containerID="f747b0088ff8904abc130442a0b31eb0267e13f212d15cd048f15a78b8251ff2" Jan 23 08:29:15 crc kubenswrapper[4711]: I0123 08:29:15.603722 4711 scope.go:117] "RemoveContainer" containerID="afadebbd640434a996a5a893e437e307dfc813dbccdae4ea361bb3cb6e790178" Jan 23 08:29:15 crc kubenswrapper[4711]: I0123 08:29:15.618594 4711 scope.go:117] "RemoveContainer" containerID="8ce99308f919652241bd476f7f200338c78e4484fcfd5c80ee01130db94a9245" Jan 23 08:29:15 crc kubenswrapper[4711]: I0123 08:29:15.633747 4711 scope.go:117] "RemoveContainer" containerID="1f4f0199628880e379c4fd6615da6ba8f9a4f4d398cda362e89af71a80f40522" Jan 23 08:29:55 crc kubenswrapper[4711]: I0123 08:29:55.993596 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:29:55 crc kubenswrapper[4711]: I0123 08:29:55.994111 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:30:00 crc kubenswrapper[4711]: I0123 08:30:00.162149 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29485950-sl65w"] Jan 23 08:30:00 crc kubenswrapper[4711]: E0123 08:30:00.162717 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb9eeabb-23bb-45db-bcbb-aae7c165f260" containerName="registry" Jan 23 08:30:00 crc kubenswrapper[4711]: I0123 08:30:00.162734 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb9eeabb-23bb-45db-bcbb-aae7c165f260" containerName="registry" Jan 23 08:30:00 crc kubenswrapper[4711]: I0123 08:30:00.162863 4711 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="bb9eeabb-23bb-45db-bcbb-aae7c165f260" containerName="registry" Jan 23 08:30:00 crc kubenswrapper[4711]: I0123 08:30:00.163353 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29485950-sl65w" Jan 23 08:30:00 crc kubenswrapper[4711]: I0123 08:30:00.165451 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 23 08:30:00 crc kubenswrapper[4711]: I0123 08:30:00.166745 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 23 08:30:00 crc kubenswrapper[4711]: I0123 08:30:00.179051 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29485950-sl65w"] Jan 23 08:30:00 crc kubenswrapper[4711]: I0123 08:30:00.241530 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76s4k\" (UniqueName: \"kubernetes.io/projected/fcbea6f3-c327-4d95-b808-d2a9afd0804d-kube-api-access-76s4k\") pod \"collect-profiles-29485950-sl65w\" (UID: \"fcbea6f3-c327-4d95-b808-d2a9afd0804d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485950-sl65w" Jan 23 08:30:00 crc kubenswrapper[4711]: I0123 08:30:00.241584 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fcbea6f3-c327-4d95-b808-d2a9afd0804d-config-volume\") pod \"collect-profiles-29485950-sl65w\" (UID: \"fcbea6f3-c327-4d95-b808-d2a9afd0804d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485950-sl65w" Jan 23 08:30:00 crc kubenswrapper[4711]: I0123 08:30:00.241615 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fcbea6f3-c327-4d95-b808-d2a9afd0804d-secret-volume\") pod \"collect-profiles-29485950-sl65w\" (UID: \"fcbea6f3-c327-4d95-b808-d2a9afd0804d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485950-sl65w" Jan 23 08:30:00 crc kubenswrapper[4711]: I0123 08:30:00.343410 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76s4k\" (UniqueName: \"kubernetes.io/projected/fcbea6f3-c327-4d95-b808-d2a9afd0804d-kube-api-access-76s4k\") pod \"collect-profiles-29485950-sl65w\" (UID: \"fcbea6f3-c327-4d95-b808-d2a9afd0804d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485950-sl65w" Jan 23 08:30:00 crc kubenswrapper[4711]: I0123 08:30:00.343990 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fcbea6f3-c327-4d95-b808-d2a9afd0804d-config-volume\") pod \"collect-profiles-29485950-sl65w\" (UID: \"fcbea6f3-c327-4d95-b808-d2a9afd0804d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485950-sl65w" Jan 23 08:30:00 crc kubenswrapper[4711]: I0123 08:30:00.344348 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fcbea6f3-c327-4d95-b808-d2a9afd0804d-secret-volume\") pod \"collect-profiles-29485950-sl65w\" (UID: \"fcbea6f3-c327-4d95-b808-d2a9afd0804d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485950-sl65w" Jan 23 08:30:00 crc kubenswrapper[4711]: I0123 08:30:00.344859 4711 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fcbea6f3-c327-4d95-b808-d2a9afd0804d-config-volume\") pod \"collect-profiles-29485950-sl65w\" (UID: \"fcbea6f3-c327-4d95-b808-d2a9afd0804d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485950-sl65w" Jan 23 08:30:00 crc kubenswrapper[4711]: I0123 08:30:00.361362 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fcbea6f3-c327-4d95-b808-d2a9afd0804d-secret-volume\") pod \"collect-profiles-29485950-sl65w\" (UID: \"fcbea6f3-c327-4d95-b808-d2a9afd0804d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485950-sl65w" Jan 23 08:30:00 crc kubenswrapper[4711]: I0123 08:30:00.361668 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76s4k\" (UniqueName: \"kubernetes.io/projected/fcbea6f3-c327-4d95-b808-d2a9afd0804d-kube-api-access-76s4k\") pod \"collect-profiles-29485950-sl65w\" (UID: \"fcbea6f3-c327-4d95-b808-d2a9afd0804d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485950-sl65w" Jan 23 08:30:00 crc kubenswrapper[4711]: I0123 08:30:00.485608 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29485950-sl65w" Jan 23 08:30:00 crc kubenswrapper[4711]: I0123 08:30:00.670237 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29485950-sl65w"] Jan 23 08:30:01 crc kubenswrapper[4711]: I0123 08:30:01.178581 4711 generic.go:334] "Generic (PLEG): container finished" podID="fcbea6f3-c327-4d95-b808-d2a9afd0804d" containerID="af34ba8213df1729fcce4652c19e53333b80a85910a439fd970daae5e77a2ea7" exitCode=0 Jan 23 08:30:01 crc kubenswrapper[4711]: I0123 08:30:01.178628 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29485950-sl65w" event={"ID":"fcbea6f3-c327-4d95-b808-d2a9afd0804d","Type":"ContainerDied","Data":"af34ba8213df1729fcce4652c19e53333b80a85910a439fd970daae5e77a2ea7"} Jan 23 08:30:01 crc kubenswrapper[4711]: I0123 08:30:01.178935 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29485950-sl65w" event={"ID":"fcbea6f3-c327-4d95-b808-d2a9afd0804d","Type":"ContainerStarted","Data":"136b34be888f9e34bea9cd76d86a162004a9d2ee4c2c3ecd9fc9390d9c431200"} Jan 23 08:30:02 crc kubenswrapper[4711]: I0123 08:30:02.434547 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29485950-sl65w" Jan 23 08:30:02 crc kubenswrapper[4711]: I0123 08:30:02.481321 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-76s4k\" (UniqueName: \"kubernetes.io/projected/fcbea6f3-c327-4d95-b808-d2a9afd0804d-kube-api-access-76s4k\") pod \"fcbea6f3-c327-4d95-b808-d2a9afd0804d\" (UID: \"fcbea6f3-c327-4d95-b808-d2a9afd0804d\") " Jan 23 08:30:02 crc kubenswrapper[4711]: I0123 08:30:02.481391 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fcbea6f3-c327-4d95-b808-d2a9afd0804d-secret-volume\") pod \"fcbea6f3-c327-4d95-b808-d2a9afd0804d\" (UID: \"fcbea6f3-c327-4d95-b808-d2a9afd0804d\") " Jan 23 08:30:02 crc kubenswrapper[4711]: I0123 08:30:02.481457 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fcbea6f3-c327-4d95-b808-d2a9afd0804d-config-volume\") pod \"fcbea6f3-c327-4d95-b808-d2a9afd0804d\" (UID: \"fcbea6f3-c327-4d95-b808-d2a9afd0804d\") " Jan 23 08:30:02 crc kubenswrapper[4711]: I0123 08:30:02.482165 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcbea6f3-c327-4d95-b808-d2a9afd0804d-config-volume" (OuterVolumeSpecName: "config-volume") pod "fcbea6f3-c327-4d95-b808-d2a9afd0804d" (UID: "fcbea6f3-c327-4d95-b808-d2a9afd0804d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:30:02 crc kubenswrapper[4711]: I0123 08:30:02.482381 4711 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fcbea6f3-c327-4d95-b808-d2a9afd0804d-config-volume\") on node \"crc\" DevicePath \"\"" Jan 23 08:30:02 crc kubenswrapper[4711]: I0123 08:30:02.486165 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcbea6f3-c327-4d95-b808-d2a9afd0804d-kube-api-access-76s4k" (OuterVolumeSpecName: "kube-api-access-76s4k") pod "fcbea6f3-c327-4d95-b808-d2a9afd0804d" (UID: "fcbea6f3-c327-4d95-b808-d2a9afd0804d"). InnerVolumeSpecName "kube-api-access-76s4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:30:02 crc kubenswrapper[4711]: I0123 08:30:02.486178 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcbea6f3-c327-4d95-b808-d2a9afd0804d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "fcbea6f3-c327-4d95-b808-d2a9afd0804d" (UID: "fcbea6f3-c327-4d95-b808-d2a9afd0804d"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:30:02 crc kubenswrapper[4711]: I0123 08:30:02.583984 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-76s4k\" (UniqueName: \"kubernetes.io/projected/fcbea6f3-c327-4d95-b808-d2a9afd0804d-kube-api-access-76s4k\") on node \"crc\" DevicePath \"\"" Jan 23 08:30:02 crc kubenswrapper[4711]: I0123 08:30:02.584031 4711 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fcbea6f3-c327-4d95-b808-d2a9afd0804d-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 23 08:30:03 crc kubenswrapper[4711]: I0123 08:30:03.191627 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29485950-sl65w" event={"ID":"fcbea6f3-c327-4d95-b808-d2a9afd0804d","Type":"ContainerDied","Data":"136b34be888f9e34bea9cd76d86a162004a9d2ee4c2c3ecd9fc9390d9c431200"} Jan 23 08:30:03 crc kubenswrapper[4711]: I0123 08:30:03.191660 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="136b34be888f9e34bea9cd76d86a162004a9d2ee4c2c3ecd9fc9390d9c431200" Jan 23 08:30:03 crc kubenswrapper[4711]: I0123 08:30:03.191710 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29485950-sl65w" Jan 23 08:30:15 crc kubenswrapper[4711]: I0123 08:30:15.681965 4711 scope.go:117] "RemoveContainer" containerID="755633f637756d4a7d83d5c5bd89f565cdb0955e3c2dc1326c8ffcf6507fc381" Jan 23 08:30:15 crc kubenswrapper[4711]: I0123 08:30:15.701593 4711 scope.go:117] "RemoveContainer" containerID="42b79e0cc88ccf00ce44cd26a24897cbff7bf01cefb1fcd1afbe08dffd71f338" Jan 23 08:30:15 crc kubenswrapper[4711]: I0123 08:30:15.732523 4711 scope.go:117] "RemoveContainer" containerID="9d810b44218c9d74c6c8be3341a2a9aac420c4826ace8be94e3b71d30a21e7d4" Jan 23 08:30:15 crc kubenswrapper[4711]: I0123 08:30:15.758339 4711 scope.go:117] "RemoveContainer" containerID="8ff61d4563602564033e609655da6c2bdeef004c75029401ac22e52ede6f9268" Jan 23 08:30:25 crc kubenswrapper[4711]: I0123 08:30:25.993787 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:30:25 crc kubenswrapper[4711]: I0123 08:30:25.994397 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:30:55 crc kubenswrapper[4711]: I0123 08:30:55.993943 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:30:55 crc kubenswrapper[4711]: I0123 08:30:55.996598 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Jan 23 08:30:55 crc kubenswrapper[4711]: I0123 08:30:55.996653 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:30:55 crc kubenswrapper[4711]: I0123 08:30:55.997299 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dcb49a247b6f118496bffafa6bebed1af2fd5b9f478c43e4444025730a1bbd84"} pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 23 08:30:55 crc kubenswrapper[4711]: I0123 08:30:55.997361 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" containerID="cri-o://dcb49a247b6f118496bffafa6bebed1af2fd5b9f478c43e4444025730a1bbd84" gracePeriod=600 Jan 23 08:30:56 crc kubenswrapper[4711]: I0123 08:30:56.510898 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerDied","Data":"dcb49a247b6f118496bffafa6bebed1af2fd5b9f478c43e4444025730a1bbd84"} Jan 23 08:30:56 crc kubenswrapper[4711]: I0123 08:30:56.510998 4711 generic.go:334] "Generic (PLEG): container finished" podID="3846d4e0-cfda-4e0b-8747-85267de12736" containerID="dcb49a247b6f118496bffafa6bebed1af2fd5b9f478c43e4444025730a1bbd84" exitCode=0 Jan 23 08:30:56 crc kubenswrapper[4711]: I0123 08:30:56.511882 4711 scope.go:117] "RemoveContainer" containerID="b064804566a711ebf557e11734763f83796197487bf136fb43efc8f895f896a6" Jan 23 08:30:56 crc kubenswrapper[4711]: I0123 08:30:56.511912 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerStarted","Data":"09dfb01b93dac17d4e6980fd3e7ea0054118ce3392de559f35289e1fef65f8f9"} Jan 23 08:31:15 crc kubenswrapper[4711]: I0123 08:31:15.795333 4711 scope.go:117] "RemoveContainer" containerID="da240b8885ee249428d29acbcca2a09d0ef6adeb5bedbf84c08e6707aecb91d1" Jan 23 08:31:15 crc kubenswrapper[4711]: I0123 08:31:15.811937 4711 scope.go:117] "RemoveContainer" containerID="d201da90ae40402dfe94d1564ffb7b432cfed3f65f708eb302d38104df07574a" Jan 23 08:31:15 crc kubenswrapper[4711]: I0123 08:31:15.824573 4711 scope.go:117] "RemoveContainer" containerID="6bf029d6e69febaae87ae3a440c7a5630dd2120cfbd6f99db0fe2f896ee02e4b" Jan 23 08:31:15 crc kubenswrapper[4711]: I0123 08:31:15.836704 4711 scope.go:117] "RemoveContainer" containerID="aafaec601ef4340da071c13ae3b0d6eb58e8c9a27b55af2de7dcf0248244fccf" Jan 23 08:31:15 crc kubenswrapper[4711]: I0123 08:31:15.849426 4711 scope.go:117] "RemoveContainer" containerID="afa046fdb95761c8923dbf7af7860bf7b9452121fc3869e6b254dc5b78feadd4" Jan 23 08:31:15 crc kubenswrapper[4711]: I0123 08:31:15.871960 4711 scope.go:117] "RemoveContainer" containerID="48810ebd2ea1e79769533976b8c4270dfc2a1a28a0d87e7ede8f271770701042" Jan 23 08:32:03 crc kubenswrapper[4711]: I0123 08:32:03.209457 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz"] Jan 23 08:32:03 crc kubenswrapper[4711]: E0123 08:32:03.210253 4711 
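
The two probe failures above, thirty seconds apart, are what drive the kubelet to kill and restart machine-config-daemon. Only the host, port, and path of the probe appear in this log; as a minimal sketch of the kind of probe definition that would produce it, assuming a recent k8s.io/api (where Probe embeds ProbeHandler) and assuming the timing values:

    package main

    import (
            "fmt"

            corev1 "k8s.io/api/core/v1"
            "k8s.io/apimachinery/pkg/util/intstr"
    )

    func main() {
            probe := corev1.Probe{
                    ProbeHandler: corev1.ProbeHandler{
                            HTTPGet: &corev1.HTTPGetAction{
                                    Host: "127.0.0.1", // endpoint taken from the log output above
                                    Path: "/health",
                                    Port: intstr.FromInt(8798),
                            },
                    },
                    PeriodSeconds:    30, // assumption: failures above are ~30s apart
                    FailureThreshold: 3,  // assumption: not recoverable from this log
            }
            fmt.Printf("%+v\n", probe)
    }

Note that gracePeriod=600 in the "Killing container" entry below comes from the pod's termination grace period, not from the probe itself.
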
Jan 23 08:32:03 crc kubenswrapper[4711]: I0123 08:32:03.210265 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcbea6f3-c327-4d95-b808-d2a9afd0804d" containerName="collect-profiles"
Jan 23 08:32:03 crc kubenswrapper[4711]: I0123 08:32:03.210362 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcbea6f3-c327-4d95-b808-d2a9afd0804d" containerName="collect-profiles"
Jan 23 08:32:03 crc kubenswrapper[4711]: I0123 08:32:03.211001 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz"
Jan 23 08:32:03 crc kubenswrapper[4711]: I0123 08:32:03.213379 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Jan 23 08:32:03 crc kubenswrapper[4711]: I0123 08:32:03.220971 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz"]
Jan 23 08:32:03 crc kubenswrapper[4711]: I0123 08:32:03.355421 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhztn\" (UniqueName: \"kubernetes.io/projected/8362e2cd-76b3-44f8-8d22-b8542c471584-kube-api-access-mhztn\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz\" (UID: \"8362e2cd-76b3-44f8-8d22-b8542c471584\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz"
Jan 23 08:32:03 crc kubenswrapper[4711]: I0123 08:32:03.355476 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8362e2cd-76b3-44f8-8d22-b8542c471584-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz\" (UID: \"8362e2cd-76b3-44f8-8d22-b8542c471584\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz"
Jan 23 08:32:03 crc kubenswrapper[4711]: I0123 08:32:03.355656 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8362e2cd-76b3-44f8-8d22-b8542c471584-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz\" (UID: \"8362e2cd-76b3-44f8-8d22-b8542c471584\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz"
Jan 23 08:32:03 crc kubenswrapper[4711]: I0123 08:32:03.456584 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhztn\" (UniqueName: \"kubernetes.io/projected/8362e2cd-76b3-44f8-8d22-b8542c471584-kube-api-access-mhztn\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz\" (UID: \"8362e2cd-76b3-44f8-8d22-b8542c471584\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz"
Jan 23 08:32:03 crc kubenswrapper[4711]: I0123 08:32:03.456640 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8362e2cd-76b3-44f8-8d22-b8542c471584-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz\" (UID: \"8362e2cd-76b3-44f8-8d22-b8542c471584\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz"
Jan 23 08:32:03 crc kubenswrapper[4711]: I0123 08:32:03.456680 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8362e2cd-76b3-44f8-8d22-b8542c471584-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz\" (UID: \"8362e2cd-76b3-44f8-8d22-b8542c471584\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz"
Jan 23 08:32:03 crc kubenswrapper[4711]: I0123 08:32:03.457075 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8362e2cd-76b3-44f8-8d22-b8542c471584-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz\" (UID: \"8362e2cd-76b3-44f8-8d22-b8542c471584\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz"
Jan 23 08:32:03 crc kubenswrapper[4711]: I0123 08:32:03.457120 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8362e2cd-76b3-44f8-8d22-b8542c471584-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz\" (UID: \"8362e2cd-76b3-44f8-8d22-b8542c471584\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz"
Jan 23 08:32:03 crc kubenswrapper[4711]: I0123 08:32:03.481160 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhztn\" (UniqueName: \"kubernetes.io/projected/8362e2cd-76b3-44f8-8d22-b8542c471584-kube-api-access-mhztn\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz\" (UID: \"8362e2cd-76b3-44f8-8d22-b8542c471584\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz"
Jan 23 08:32:03 crc kubenswrapper[4711]: I0123 08:32:03.525533 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz"
Jan 23 08:32:03 crc kubenswrapper[4711]: I0123 08:32:03.700575 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz"]
Jan 23 08:32:03 crc kubenswrapper[4711]: I0123 08:32:03.889117 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz" event={"ID":"8362e2cd-76b3-44f8-8d22-b8542c471584","Type":"ContainerStarted","Data":"e16e9d40e24049611d6388c3033224a4a3ee95a9b108f7540af597ca8ca3db99"}
Jan 23 08:32:05 crc kubenswrapper[4711]: I0123 08:32:05.902251 4711 generic.go:334] "Generic (PLEG): container finished" podID="8362e2cd-76b3-44f8-8d22-b8542c471584" containerID="8c5cd714150015dde4a612712368169375ed9a9c44df404b32251904283b8b54" exitCode=0
Jan 23 08:32:05 crc kubenswrapper[4711]: I0123 08:32:05.902377 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz" event={"ID":"8362e2cd-76b3-44f8-8d22-b8542c471584","Type":"ContainerDied","Data":"8c5cd714150015dde4a612712368169375ed9a9c44df404b32251904283b8b54"}
Jan 23 08:32:05 crc kubenswrapper[4711]: I0123 08:32:05.904551 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 23 08:32:12 crc kubenswrapper[4711]: I0123 08:32:12.945221 4711 generic.go:334] "Generic (PLEG): container finished" podID="8362e2cd-76b3-44f8-8d22-b8542c471584" containerID="b4e4472b1ad1433121c4112672ddf0fb06849461d5944d9c89b3ed8e83c18b34" exitCode=0
Jan 23 08:32:12 crc kubenswrapper[4711]: I0123 08:32:12.945267 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz" event={"ID":"8362e2cd-76b3-44f8-8d22-b8542c471584","Type":"ContainerDied","Data":"b4e4472b1ad1433121c4112672ddf0fb06849461d5944d9c89b3ed8e83c18b34"}
Jan 23 08:32:13 crc kubenswrapper[4711]: I0123 08:32:13.953256 4711 generic.go:334] "Generic (PLEG): container finished" podID="8362e2cd-76b3-44f8-8d22-b8542c471584" containerID="835c9031c8a9daef352fa7b1be2452866b43a615ad1d6152716d8552ccf108f8" exitCode=0
Jan 23 08:32:13 crc kubenswrapper[4711]: I0123 08:32:13.953322 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz" event={"ID":"8362e2cd-76b3-44f8-8d22-b8542c471584","Type":"ContainerDied","Data":"835c9031c8a9daef352fa7b1be2452866b43a615ad1d6152716d8552ccf108f8"}
Jan 23 08:32:15 crc kubenswrapper[4711]: I0123 08:32:15.237044 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz"
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz" Jan 23 08:32:15 crc kubenswrapper[4711]: I0123 08:32:15.412088 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mhztn\" (UniqueName: \"kubernetes.io/projected/8362e2cd-76b3-44f8-8d22-b8542c471584-kube-api-access-mhztn\") pod \"8362e2cd-76b3-44f8-8d22-b8542c471584\" (UID: \"8362e2cd-76b3-44f8-8d22-b8542c471584\") " Jan 23 08:32:15 crc kubenswrapper[4711]: I0123 08:32:15.412310 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8362e2cd-76b3-44f8-8d22-b8542c471584-bundle\") pod \"8362e2cd-76b3-44f8-8d22-b8542c471584\" (UID: \"8362e2cd-76b3-44f8-8d22-b8542c471584\") " Jan 23 08:32:15 crc kubenswrapper[4711]: I0123 08:32:15.412359 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8362e2cd-76b3-44f8-8d22-b8542c471584-util\") pod \"8362e2cd-76b3-44f8-8d22-b8542c471584\" (UID: \"8362e2cd-76b3-44f8-8d22-b8542c471584\") " Jan 23 08:32:15 crc kubenswrapper[4711]: I0123 08:32:15.412974 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8362e2cd-76b3-44f8-8d22-b8542c471584-bundle" (OuterVolumeSpecName: "bundle") pod "8362e2cd-76b3-44f8-8d22-b8542c471584" (UID: "8362e2cd-76b3-44f8-8d22-b8542c471584"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:32:15 crc kubenswrapper[4711]: I0123 08:32:15.419771 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8362e2cd-76b3-44f8-8d22-b8542c471584-kube-api-access-mhztn" (OuterVolumeSpecName: "kube-api-access-mhztn") pod "8362e2cd-76b3-44f8-8d22-b8542c471584" (UID: "8362e2cd-76b3-44f8-8d22-b8542c471584"). InnerVolumeSpecName "kube-api-access-mhztn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:32:15 crc kubenswrapper[4711]: I0123 08:32:15.422468 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8362e2cd-76b3-44f8-8d22-b8542c471584-util" (OuterVolumeSpecName: "util") pod "8362e2cd-76b3-44f8-8d22-b8542c471584" (UID: "8362e2cd-76b3-44f8-8d22-b8542c471584"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:32:15 crc kubenswrapper[4711]: I0123 08:32:15.514636 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mhztn\" (UniqueName: \"kubernetes.io/projected/8362e2cd-76b3-44f8-8d22-b8542c471584-kube-api-access-mhztn\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:15 crc kubenswrapper[4711]: I0123 08:32:15.514738 4711 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8362e2cd-76b3-44f8-8d22-b8542c471584-bundle\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:15 crc kubenswrapper[4711]: I0123 08:32:15.514805 4711 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8362e2cd-76b3-44f8-8d22-b8542c471584-util\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:15 crc kubenswrapper[4711]: I0123 08:32:15.972956 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz" event={"ID":"8362e2cd-76b3-44f8-8d22-b8542c471584","Type":"ContainerDied","Data":"e16e9d40e24049611d6388c3033224a4a3ee95a9b108f7540af597ca8ca3db99"} Jan 23 08:32:15 crc kubenswrapper[4711]: I0123 08:32:15.973005 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e16e9d40e24049611d6388c3033224a4a3ee95a9b108f7540af597ca8ca3db99" Jan 23 08:32:15 crc kubenswrapper[4711]: I0123 08:32:15.973065 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz" Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.274000 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-jmffw"] Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.274428 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovn-controller" containerID="cri-o://65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154" gracePeriod=30 Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.274482 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="nbdb" containerID="cri-o://37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d" gracePeriod=30 Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.274559 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovn-acl-logging" containerID="cri-o://34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f" gracePeriod=30 Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.274540 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289" gracePeriod=30 Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.274565 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="northd" 
containerID="cri-o://66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5" gracePeriod=30 Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.274555 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="kube-rbac-proxy-node" containerID="cri-o://a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649" gracePeriod=30 Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.274584 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="sbdb" containerID="cri-o://793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365" gracePeriod=30 Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.306179 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovnkube-controller" containerID="cri-o://2b9ff6ab4f162a1d535c08524bef44284e952c46441e4985ab3a72434482d09e" gracePeriod=30 Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.979487 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jmffw_e16bfd0e-30fd-4fcf-865b-63400b88cff3/ovnkube-controller/3.log" Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.981666 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jmffw_e16bfd0e-30fd-4fcf-865b-63400b88cff3/ovn-acl-logging/0.log" Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.982125 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jmffw_e16bfd0e-30fd-4fcf-865b-63400b88cff3/ovn-controller/0.log" Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.982480 4711 generic.go:334] "Generic (PLEG): container finished" podID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerID="2b9ff6ab4f162a1d535c08524bef44284e952c46441e4985ab3a72434482d09e" exitCode=0 Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.982524 4711 generic.go:334] "Generic (PLEG): container finished" podID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerID="793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365" exitCode=0 Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.982532 4711 generic.go:334] "Generic (PLEG): container finished" podID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerID="37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d" exitCode=0 Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.982538 4711 generic.go:334] "Generic (PLEG): container finished" podID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerID="66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5" exitCode=0 Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.982546 4711 generic.go:334] "Generic (PLEG): container finished" podID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerID="075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289" exitCode=0 Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.982554 4711 generic.go:334] "Generic (PLEG): container finished" podID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerID="a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649" exitCode=0 Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.982562 4711 generic.go:334] "Generic (PLEG): container finished" 
podID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerID="34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f" exitCode=143 Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.982568 4711 generic.go:334] "Generic (PLEG): container finished" podID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerID="65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154" exitCode=143 Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.982658 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerDied","Data":"2b9ff6ab4f162a1d535c08524bef44284e952c46441e4985ab3a72434482d09e"} Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.982685 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerDied","Data":"793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365"} Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.982697 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerDied","Data":"37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d"} Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.982706 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerDied","Data":"66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5"} Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.982714 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerDied","Data":"075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289"} Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.982723 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerDied","Data":"a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649"} Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.982732 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerDied","Data":"34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f"} Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.982740 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerDied","Data":"65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154"} Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.982760 4711 scope.go:117] "RemoveContainer" containerID="677bbe8d1690ae3e49caee53eab8ca7d0c7cf53813f44d3c4d96260363dbb073" Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.984101 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vpxkq_8cc803a0-2626-4444-b4b2-8e9567277d44/kube-multus/2.log" Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.984606 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vpxkq_8cc803a0-2626-4444-b4b2-8e9567277d44/kube-multus/1.log" Jan 23 08:32:16 
Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.984711 4711 generic.go:334] "Generic (PLEG): container finished" podID="8cc803a0-2626-4444-b4b2-8e9567277d44" containerID="bc8d9ec28733822aa97c5e7621962019e13e1d9e7d871480a0272e63ddae78d3" exitCode=2
Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.984783 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vpxkq" event={"ID":"8cc803a0-2626-4444-b4b2-8e9567277d44","Type":"ContainerDied","Data":"bc8d9ec28733822aa97c5e7621962019e13e1d9e7d871480a0272e63ddae78d3"}
Jan 23 08:32:16 crc kubenswrapper[4711]: I0123 08:32:16.985358 4711 scope.go:117] "RemoveContainer" containerID="bc8d9ec28733822aa97c5e7621962019e13e1d9e7d871480a0272e63ddae78d3"
Jan 23 08:32:16 crc kubenswrapper[4711]: E0123 08:32:16.985653 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-vpxkq_openshift-multus(8cc803a0-2626-4444-b4b2-8e9567277d44)\"" pod="openshift-multus/multus-vpxkq" podUID="8cc803a0-2626-4444-b4b2-8e9567277d44"
Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.025654 4711 scope.go:117] "RemoveContainer" containerID="eafae4831c9cebb04dc1fe0259fa32717a44734f39508d8cd162ae212d1429fa"
Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.027676 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jmffw_e16bfd0e-30fd-4fcf-865b-63400b88cff3/ovn-acl-logging/0.log"
Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.028051 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jmffw_e16bfd0e-30fd-4fcf-865b-63400b88cff3/ovn-controller/0.log"
Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.028493 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw"
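
The "back-off 20s" above is the kubelet's CrashLoopBackOff delay for kube-multus after its previous failed run (exitCode=2). A sketch of the commonly documented backoff shape, assuming the usual parameters (10s base, doubling per restart, capped at 5m); this is illustrative, not the kubelet's actual implementation:

    package main

    import (
            "fmt"
            "time"
    )

    // crashLoopDelay mirrors the documented CrashLoopBackOff behaviour:
    // 10s base delay, doubled after each failed restart, capped at 5m.
    func crashLoopDelay(priorRestarts int) time.Duration {
            d := 10 * time.Second
            for i := 0; i < priorRestarts; i++ {
                    d *= 2
                    if d >= 5*time.Minute {
                            return 5 * time.Minute
                    }
            }
            return d
    }

    func main() {
            // "back-off 20s" above corresponds to one prior failed restart.
            for r := 0; r < 6; r++ {
                    fmt.Printf("prior restarts %d -> wait %s\n", r, crashLoopDelay(r))
            }
    }
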
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.121845 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4hjrl"] Jan 23 08:32:17 crc kubenswrapper[4711]: E0123 08:32:17.122927 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovn-acl-logging" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.122948 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovn-acl-logging" Jan 23 08:32:17 crc kubenswrapper[4711]: E0123 08:32:17.122961 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8362e2cd-76b3-44f8-8d22-b8542c471584" containerName="pull" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.122971 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="8362e2cd-76b3-44f8-8d22-b8542c471584" containerName="pull" Jan 23 08:32:17 crc kubenswrapper[4711]: E0123 08:32:17.122997 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovnkube-controller" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123003 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovnkube-controller" Jan 23 08:32:17 crc kubenswrapper[4711]: E0123 08:32:17.123012 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="kubecfg-setup" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123018 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="kubecfg-setup" Jan 23 08:32:17 crc kubenswrapper[4711]: E0123 08:32:17.123027 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovn-controller" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123033 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovn-controller" Jan 23 08:32:17 crc kubenswrapper[4711]: E0123 08:32:17.123042 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovnkube-controller" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123048 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovnkube-controller" Jan 23 08:32:17 crc kubenswrapper[4711]: E0123 08:32:17.123055 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="sbdb" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123062 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="sbdb" Jan 23 08:32:17 crc kubenswrapper[4711]: E0123 08:32:17.123074 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="kube-rbac-proxy-node" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123080 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="kube-rbac-proxy-node" Jan 23 08:32:17 crc kubenswrapper[4711]: E0123 08:32:17.123088 4711 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovnkube-controller" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123094 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovnkube-controller" Jan 23 08:32:17 crc kubenswrapper[4711]: E0123 08:32:17.123102 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="northd" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123108 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="northd" Jan 23 08:32:17 crc kubenswrapper[4711]: E0123 08:32:17.123117 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="nbdb" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123124 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="nbdb" Jan 23 08:32:17 crc kubenswrapper[4711]: E0123 08:32:17.123130 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8362e2cd-76b3-44f8-8d22-b8542c471584" containerName="extract" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123136 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="8362e2cd-76b3-44f8-8d22-b8542c471584" containerName="extract" Jan 23 08:32:17 crc kubenswrapper[4711]: E0123 08:32:17.123144 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovnkube-controller" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123151 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovnkube-controller" Jan 23 08:32:17 crc kubenswrapper[4711]: E0123 08:32:17.123158 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="kube-rbac-proxy-ovn-metrics" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123165 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="kube-rbac-proxy-ovn-metrics" Jan 23 08:32:17 crc kubenswrapper[4711]: E0123 08:32:17.123178 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8362e2cd-76b3-44f8-8d22-b8542c471584" containerName="util" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123184 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="8362e2cd-76b3-44f8-8d22-b8542c471584" containerName="util" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123298 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="northd" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123308 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovn-acl-logging" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123319 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="kube-rbac-proxy-ovn-metrics" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123329 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovn-controller" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123338 4711 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovnkube-controller" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123345 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="nbdb" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123352 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovnkube-controller" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123358 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="kube-rbac-proxy-node" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123366 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovnkube-controller" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123373 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="8362e2cd-76b3-44f8-8d22-b8542c471584" containerName="extract" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123381 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovnkube-controller" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123388 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="sbdb" Jan 23 08:32:17 crc kubenswrapper[4711]: E0123 08:32:17.123478 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovnkube-controller" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123485 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovnkube-controller" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.123604 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" containerName="ovnkube-controller" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.125916 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135024 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-kubelet\") pod \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135195 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-etc-openvswitch\") pod \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135284 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-log-socket\") pod \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135083 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "e16bfd0e-30fd-4fcf-865b-63400b88cff3" (UID: "e16bfd0e-30fd-4fcf-865b-63400b88cff3"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135228 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "e16bfd0e-30fd-4fcf-865b-63400b88cff3" (UID: "e16bfd0e-30fd-4fcf-865b-63400b88cff3"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135347 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-node-log\") pod \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135400 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-log-socket" (OuterVolumeSpecName: "log-socket") pod "e16bfd0e-30fd-4fcf-865b-63400b88cff3" (UID: "e16bfd0e-30fd-4fcf-865b-63400b88cff3"). InnerVolumeSpecName "log-socket". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135427 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-run-ovn\") pod \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135466 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-run-openvswitch\") pod \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135483 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-run-ovn-kubernetes\") pod \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135504 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e16bfd0e-30fd-4fcf-865b-63400b88cff3-ovnkube-config\") pod \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135537 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-run-netns\") pod \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135566 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-systemd-units\") pod \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135581 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e16bfd0e-30fd-4fcf-865b-63400b88cff3-env-overrides\") pod \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135605 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-cni-netd\") pod \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135621 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e16bfd0e-30fd-4fcf-865b-63400b88cff3-ovn-node-metrics-cert\") pod \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135650 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-var-lib-openvswitch\") pod 
\"e16bfd0e-30fd-4fcf-865b-63400b88cff3\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135665 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-run-systemd\") pod \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135680 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mppnf\" (UniqueName: \"kubernetes.io/projected/e16bfd0e-30fd-4fcf-865b-63400b88cff3-kube-api-access-mppnf\") pod \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135699 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e16bfd0e-30fd-4fcf-865b-63400b88cff3-ovnkube-script-lib\") pod \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135717 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-var-lib-cni-networks-ovn-kubernetes\") pod \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135731 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-slash\") pod \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135747 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-cni-bin\") pod \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\" (UID: \"e16bfd0e-30fd-4fcf-865b-63400b88cff3\") " Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.136070 4711 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-kubelet\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.136088 4711 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.136096 4711 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-log-socket\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135516 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "e16bfd0e-30fd-4fcf-865b-63400b88cff3" (UID: "e16bfd0e-30fd-4fcf-865b-63400b88cff3"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.136207 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e16bfd0e-30fd-4fcf-865b-63400b88cff3-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "e16bfd0e-30fd-4fcf-865b-63400b88cff3" (UID: "e16bfd0e-30fd-4fcf-865b-63400b88cff3"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135549 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "e16bfd0e-30fd-4fcf-865b-63400b88cff3" (UID: "e16bfd0e-30fd-4fcf-865b-63400b88cff3"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135572 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "e16bfd0e-30fd-4fcf-865b-63400b88cff3" (UID: "e16bfd0e-30fd-4fcf-865b-63400b88cff3"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135581 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "e16bfd0e-30fd-4fcf-865b-63400b88cff3" (UID: "e16bfd0e-30fd-4fcf-865b-63400b88cff3"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135622 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "e16bfd0e-30fd-4fcf-865b-63400b88cff3" (UID: "e16bfd0e-30fd-4fcf-865b-63400b88cff3"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135654 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "e16bfd0e-30fd-4fcf-865b-63400b88cff3" (UID: "e16bfd0e-30fd-4fcf-865b-63400b88cff3"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135688 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "e16bfd0e-30fd-4fcf-865b-63400b88cff3" (UID: "e16bfd0e-30fd-4fcf-865b-63400b88cff3"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135899 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "e16bfd0e-30fd-4fcf-865b-63400b88cff3" (UID: "e16bfd0e-30fd-4fcf-865b-63400b88cff3"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.135920 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-slash" (OuterVolumeSpecName: "host-slash") pod "e16bfd0e-30fd-4fcf-865b-63400b88cff3" (UID: "e16bfd0e-30fd-4fcf-865b-63400b88cff3"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.136019 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e16bfd0e-30fd-4fcf-865b-63400b88cff3-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "e16bfd0e-30fd-4fcf-865b-63400b88cff3" (UID: "e16bfd0e-30fd-4fcf-865b-63400b88cff3"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.136044 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-node-log" (OuterVolumeSpecName: "node-log") pod "e16bfd0e-30fd-4fcf-865b-63400b88cff3" (UID: "e16bfd0e-30fd-4fcf-865b-63400b88cff3"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.136120 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "e16bfd0e-30fd-4fcf-865b-63400b88cff3" (UID: "e16bfd0e-30fd-4fcf-865b-63400b88cff3"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.136212 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e16bfd0e-30fd-4fcf-865b-63400b88cff3-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "e16bfd0e-30fd-4fcf-865b-63400b88cff3" (UID: "e16bfd0e-30fd-4fcf-865b-63400b88cff3"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.138957 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e16bfd0e-30fd-4fcf-865b-63400b88cff3-kube-api-access-mppnf" (OuterVolumeSpecName: "kube-api-access-mppnf") pod "e16bfd0e-30fd-4fcf-865b-63400b88cff3" (UID: "e16bfd0e-30fd-4fcf-865b-63400b88cff3"). InnerVolumeSpecName "kube-api-access-mppnf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.139182 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e16bfd0e-30fd-4fcf-865b-63400b88cff3-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "e16bfd0e-30fd-4fcf-865b-63400b88cff3" (UID: "e16bfd0e-30fd-4fcf-865b-63400b88cff3"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.146047 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "e16bfd0e-30fd-4fcf-865b-63400b88cff3" (UID: "e16bfd0e-30fd-4fcf-865b-63400b88cff3"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.294723 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.294775 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-etc-openvswitch\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.294801 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-var-lib-openvswitch\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.294831 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/811de1e3-ad1b-42a3-b464-2f74e47155dd-env-overrides\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.294867 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/811de1e3-ad1b-42a3-b464-2f74e47155dd-ovn-node-metrics-cert\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.294899 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-systemd-units\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.294927 4711 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-host-run-netns\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.294960 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-host-slash\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.294979 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/811de1e3-ad1b-42a3-b464-2f74e47155dd-ovnkube-config\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295049 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-host-run-ovn-kubernetes\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295115 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-host-kubelet\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295152 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-run-systemd\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295172 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-run-openvswitch\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295192 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-run-ovn\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295239 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/811de1e3-ad1b-42a3-b464-2f74e47155dd-ovnkube-script-lib\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc 
kubenswrapper[4711]: I0123 08:32:17.295300 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-node-log\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295322 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-host-cni-bin\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295356 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-log-socket\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295379 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-host-cni-netd\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295403 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9klxt\" (UniqueName: \"kubernetes.io/projected/811de1e3-ad1b-42a3-b464-2f74e47155dd-kube-api-access-9klxt\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295455 4711 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-node-log\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295471 4711 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295484 4711 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295497 4711 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e16bfd0e-30fd-4fcf-865b-63400b88cff3-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295539 4711 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295552 4711 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-run-netns\") on 
node \"crc\" DevicePath \"\"" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295564 4711 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-systemd-units\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295574 4711 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e16bfd0e-30fd-4fcf-865b-63400b88cff3-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295586 4711 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e16bfd0e-30fd-4fcf-865b-63400b88cff3-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295597 4711 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295608 4711 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295619 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mppnf\" (UniqueName: \"kubernetes.io/projected/e16bfd0e-30fd-4fcf-865b-63400b88cff3-kube-api-access-mppnf\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295630 4711 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295642 4711 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e16bfd0e-30fd-4fcf-865b-63400b88cff3-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295651 4711 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-slash\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295662 4711 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.295673 4711 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e16bfd0e-30fd-4fcf-865b-63400b88cff3-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.396061 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-run-systemd\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.396190 4711 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-run-systemd\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.396292 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-run-openvswitch\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.396351 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-run-openvswitch\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.396573 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/811de1e3-ad1b-42a3-b464-2f74e47155dd-ovnkube-script-lib\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.396663 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-run-ovn\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.396753 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-node-log\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.396827 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-run-ovn\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.396834 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-node-log\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.396846 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-host-cni-bin\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.396919 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-host-cni-netd\") pod \"ovnkube-node-4hjrl\" (UID: 
\"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.396934 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-host-cni-bin\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.396944 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9klxt\" (UniqueName: \"kubernetes.io/projected/811de1e3-ad1b-42a3-b464-2f74e47155dd-kube-api-access-9klxt\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397003 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-log-socket\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397043 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-host-cni-netd\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397075 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-etc-openvswitch\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397105 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-log-socket\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397053 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-etc-openvswitch\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397140 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397180 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-var-lib-openvswitch\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397221 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/811de1e3-ad1b-42a3-b464-2f74e47155dd-env-overrides\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397250 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/811de1e3-ad1b-42a3-b464-2f74e47155dd-ovn-node-metrics-cert\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397284 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-var-lib-openvswitch\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397291 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397311 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-systemd-units\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397340 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-systemd-units\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397376 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-host-run-netns\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397424 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-host-slash\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397457 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/811de1e3-ad1b-42a3-b464-2f74e47155dd-ovnkube-config\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 
08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397464 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-host-run-netns\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397492 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-host-run-ovn-kubernetes\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397544 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-host-run-ovn-kubernetes\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397556 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-host-kubelet\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397495 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-host-slash\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397596 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/811de1e3-ad1b-42a3-b464-2f74e47155dd-host-kubelet\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.397940 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/811de1e3-ad1b-42a3-b464-2f74e47155dd-env-overrides\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.398938 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/811de1e3-ad1b-42a3-b464-2f74e47155dd-ovnkube-config\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.399465 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/811de1e3-ad1b-42a3-b464-2f74e47155dd-ovnkube-script-lib\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.406377 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/811de1e3-ad1b-42a3-b464-2f74e47155dd-ovn-node-metrics-cert\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.418676 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9klxt\" (UniqueName: \"kubernetes.io/projected/811de1e3-ad1b-42a3-b464-2f74e47155dd-kube-api-access-9klxt\") pod \"ovnkube-node-4hjrl\" (UID: \"811de1e3-ad1b-42a3-b464-2f74e47155dd\") " pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.442042 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:17 crc kubenswrapper[4711]: W0123 08:32:17.460276 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod811de1e3_ad1b_42a3_b464_2f74e47155dd.slice/crio-b2567f9757766f104a0357333303b9852587d63c1e73df02fe4c209d3b8594ec WatchSource:0}: Error finding container b2567f9757766f104a0357333303b9852587d63c1e73df02fe4c209d3b8594ec: Status 404 returned error can't find the container with id b2567f9757766f104a0357333303b9852587d63c1e73df02fe4c209d3b8594ec Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.994804 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jmffw_e16bfd0e-30fd-4fcf-865b-63400b88cff3/ovn-acl-logging/0.log" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.995346 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jmffw_e16bfd0e-30fd-4fcf-865b-63400b88cff3/ovn-controller/0.log" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.995816 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" event={"ID":"e16bfd0e-30fd-4fcf-865b-63400b88cff3","Type":"ContainerDied","Data":"e6df4e19257cce69b10f5993d09781450de547988257ce794e0be35c6992f898"} Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.995891 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-jmffw" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.995991 4711 scope.go:117] "RemoveContainer" containerID="2b9ff6ab4f162a1d535c08524bef44284e952c46441e4985ab3a72434482d09e" Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.997727 4711 generic.go:334] "Generic (PLEG): container finished" podID="811de1e3-ad1b-42a3-b464-2f74e47155dd" containerID="34dce3e4c2d53223838fee5db2d695045fa1c2706fd685d04fc23809fb561c9a" exitCode=0 Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.998340 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" event={"ID":"811de1e3-ad1b-42a3-b464-2f74e47155dd","Type":"ContainerDied","Data":"34dce3e4c2d53223838fee5db2d695045fa1c2706fd685d04fc23809fb561c9a"} Jan 23 08:32:17 crc kubenswrapper[4711]: I0123 08:32:17.998395 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" event={"ID":"811de1e3-ad1b-42a3-b464-2f74e47155dd","Type":"ContainerStarted","Data":"b2567f9757766f104a0357333303b9852587d63c1e73df02fe4c209d3b8594ec"} Jan 23 08:32:18 crc kubenswrapper[4711]: I0123 08:32:18.001093 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vpxkq_8cc803a0-2626-4444-b4b2-8e9567277d44/kube-multus/2.log" Jan 23 08:32:18 crc kubenswrapper[4711]: I0123 08:32:18.014229 4711 scope.go:117] "RemoveContainer" containerID="793ec821c464b9210e50e7a16a1cf78b77210b4db0e0daa95cdf67410be34365" Jan 23 08:32:18 crc kubenswrapper[4711]: I0123 08:32:18.034488 4711 scope.go:117] "RemoveContainer" containerID="37d8352f90be4d41d8637b8af0daca4124d6f17aaeec29b0814fb26860955b9d" Jan 23 08:32:18 crc kubenswrapper[4711]: I0123 08:32:18.049589 4711 scope.go:117] "RemoveContainer" containerID="66d597fa7377e6d97905cac00540bfbde700dd7c702dce89a2b18ef8359a26f5" Jan 23 08:32:18 crc kubenswrapper[4711]: I0123 08:32:18.073812 4711 scope.go:117] "RemoveContainer" containerID="075a32907160650ec7893886d90c2ae7deb8958324a8b99e06bd881424602289" Jan 23 08:32:18 crc kubenswrapper[4711]: I0123 08:32:18.079646 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-jmffw"] Jan 23 08:32:18 crc kubenswrapper[4711]: I0123 08:32:18.083640 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-jmffw"] Jan 23 08:32:18 crc kubenswrapper[4711]: I0123 08:32:18.103636 4711 scope.go:117] "RemoveContainer" containerID="a792da782614c6293b595aeedc02c27c0deaf6518ef4ad5fac7f3bcdaeea3649" Jan 23 08:32:18 crc kubenswrapper[4711]: I0123 08:32:18.116297 4711 scope.go:117] "RemoveContainer" containerID="34ae91d59e2caa1bbd0fdd9aa2a8c4e80fb064c9ec828662aa02d478da07bc5f" Jan 23 08:32:18 crc kubenswrapper[4711]: I0123 08:32:18.128717 4711 scope.go:117] "RemoveContainer" containerID="65646dad241b1f4c967e489c9244b559d4ba2bf4f988536c8b03e50975521154" Jan 23 08:32:18 crc kubenswrapper[4711]: I0123 08:32:18.144442 4711 scope.go:117] "RemoveContainer" containerID="ee5706111f594179890219490d7335d769c631ae9d656a24f7ea3e8a01a09edc" Jan 23 08:32:19 crc kubenswrapper[4711]: I0123 08:32:19.011155 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" event={"ID":"811de1e3-ad1b-42a3-b464-2f74e47155dd","Type":"ContainerStarted","Data":"98e2d9e29a44f94d78e622fd1ca1b3d5a8c0f63cd38366e69c9fb377d76ec1c9"} Jan 23 08:32:19 crc kubenswrapper[4711]: I0123 08:32:19.011450 4711 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" event={"ID":"811de1e3-ad1b-42a3-b464-2f74e47155dd","Type":"ContainerStarted","Data":"53690d4d6403f1375664ae6b57b04ad2cf29a86a917687a2003bc5dd69d5e6b8"} Jan 23 08:32:19 crc kubenswrapper[4711]: I0123 08:32:19.011471 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" event={"ID":"811de1e3-ad1b-42a3-b464-2f74e47155dd","Type":"ContainerStarted","Data":"52eb5cf88cec7937693508d5844343bb4f83499a2efde13b913215371b2590ee"} Jan 23 08:32:19 crc kubenswrapper[4711]: I0123 08:32:19.011484 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" event={"ID":"811de1e3-ad1b-42a3-b464-2f74e47155dd","Type":"ContainerStarted","Data":"87f93d6f196b48d5621beea217ee28f67fe2b3d81405d9b53c4973cb5480728e"} Jan 23 08:32:19 crc kubenswrapper[4711]: I0123 08:32:19.011495 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" event={"ID":"811de1e3-ad1b-42a3-b464-2f74e47155dd","Type":"ContainerStarted","Data":"2a2d9c8c69c5d287f357bcddc7b23f3e40e0b342acb45a6ad4628b82830aaa5a"} Jan 23 08:32:19 crc kubenswrapper[4711]: I0123 08:32:19.011535 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" event={"ID":"811de1e3-ad1b-42a3-b464-2f74e47155dd","Type":"ContainerStarted","Data":"3fa43da6d49ee7339a826b26f350289700651ab3dbbf78a8422e524182b59a6b"} Jan 23 08:32:19 crc kubenswrapper[4711]: I0123 08:32:19.485819 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e16bfd0e-30fd-4fcf-865b-63400b88cff3" path="/var/lib/kubelet/pods/e16bfd0e-30fd-4fcf-865b-63400b88cff3/volumes" Jan 23 08:32:19 crc kubenswrapper[4711]: I0123 08:32:19.810299 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-k54qf"] Jan 23 08:32:19 crc kubenswrapper[4711]: I0123 08:32:19.811324 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" Jan 23 08:32:19 crc kubenswrapper[4711]: I0123 08:32:19.813934 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Jan 23 08:32:19 crc kubenswrapper[4711]: I0123 08:32:19.814096 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Jan 23 08:32:19 crc kubenswrapper[4711]: I0123 08:32:19.814342 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-q7ghb" Jan 23 08:32:19 crc kubenswrapper[4711]: I0123 08:32:19.928555 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkf99\" (UniqueName: \"kubernetes.io/projected/6697496e-11e9-4b32-8e2f-d485235b0d8a-kube-api-access-wkf99\") pod \"nmstate-operator-646758c888-k54qf\" (UID: \"6697496e-11e9-4b32-8e2f-d485235b0d8a\") " pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" Jan 23 08:32:20 crc kubenswrapper[4711]: I0123 08:32:20.029797 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkf99\" (UniqueName: \"kubernetes.io/projected/6697496e-11e9-4b32-8e2f-d485235b0d8a-kube-api-access-wkf99\") pod \"nmstate-operator-646758c888-k54qf\" (UID: \"6697496e-11e9-4b32-8e2f-d485235b0d8a\") " pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" Jan 23 08:32:20 crc kubenswrapper[4711]: I0123 08:32:20.047589 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkf99\" (UniqueName: \"kubernetes.io/projected/6697496e-11e9-4b32-8e2f-d485235b0d8a-kube-api-access-wkf99\") pod \"nmstate-operator-646758c888-k54qf\" (UID: \"6697496e-11e9-4b32-8e2f-d485235b0d8a\") " pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" Jan 23 08:32:20 crc kubenswrapper[4711]: I0123 08:32:20.123906 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" Jan 23 08:32:20 crc kubenswrapper[4711]: E0123 08:32:20.144883 4711 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-646758c888-k54qf_openshift-nmstate_6697496e-11e9-4b32-8e2f-d485235b0d8a_0(5a4a9dd6345ee7e762765b8744d96e1f52fd9085528efcf4f09632d615219bcb): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 23 08:32:20 crc kubenswrapper[4711]: E0123 08:32:20.144960 4711 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-646758c888-k54qf_openshift-nmstate_6697496e-11e9-4b32-8e2f-d485235b0d8a_0(5a4a9dd6345ee7e762765b8744d96e1f52fd9085528efcf4f09632d615219bcb): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" Jan 23 08:32:20 crc kubenswrapper[4711]: E0123 08:32:20.144987 4711 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-646758c888-k54qf_openshift-nmstate_6697496e-11e9-4b32-8e2f-d485235b0d8a_0(5a4a9dd6345ee7e762765b8744d96e1f52fd9085528efcf4f09632d615219bcb): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" Jan 23 08:32:20 crc kubenswrapper[4711]: E0123 08:32:20.145042 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"nmstate-operator-646758c888-k54qf_openshift-nmstate(6697496e-11e9-4b32-8e2f-d485235b0d8a)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"nmstate-operator-646758c888-k54qf_openshift-nmstate(6697496e-11e9-4b32-8e2f-d485235b0d8a)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-646758c888-k54qf_openshift-nmstate_6697496e-11e9-4b32-8e2f-d485235b0d8a_0(5a4a9dd6345ee7e762765b8744d96e1f52fd9085528efcf4f09632d615219bcb): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" podUID="6697496e-11e9-4b32-8e2f-d485235b0d8a" Jan 23 08:32:21 crc kubenswrapper[4711]: I0123 08:32:21.024646 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" event={"ID":"811de1e3-ad1b-42a3-b464-2f74e47155dd","Type":"ContainerStarted","Data":"46175a17eb985cdf9290c07de0746d2a980438ef458e4430db07e799eab560ca"} Jan 23 08:32:23 crc kubenswrapper[4711]: I0123 08:32:23.040253 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" event={"ID":"811de1e3-ad1b-42a3-b464-2f74e47155dd","Type":"ContainerStarted","Data":"59bb80dfa5c0a58fcba4912f38d36c8fade2cecf84a32975c2f9379d6727869e"} Jan 23 08:32:23 crc kubenswrapper[4711]: I0123 08:32:23.040771 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:23 crc kubenswrapper[4711]: I0123 08:32:23.040785 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:23 crc kubenswrapper[4711]: I0123 08:32:23.040793 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:23 crc kubenswrapper[4711]: I0123 08:32:23.066663 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:23 crc kubenswrapper[4711]: I0123 08:32:23.070506 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:23 crc kubenswrapper[4711]: I0123 08:32:23.112822 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" podStartSLOduration=6.112794107 podStartE2EDuration="6.112794107s" podCreationTimestamp="2026-01-23 08:32:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:32:23.080742359 +0000 UTC m=+728.653698737" watchObservedRunningTime="2026-01-23 08:32:23.112794107 +0000 UTC m=+728.685750495" Jan 23 08:32:23 crc kubenswrapper[4711]: I0123 08:32:23.717349 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-k54qf"] Jan 23 08:32:23 crc kubenswrapper[4711]: I0123 08:32:23.717465 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" Jan 23 08:32:23 crc kubenswrapper[4711]: I0123 08:32:23.717871 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" Jan 23 08:32:23 crc kubenswrapper[4711]: E0123 08:32:23.753422 4711 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-646758c888-k54qf_openshift-nmstate_6697496e-11e9-4b32-8e2f-d485235b0d8a_0(dfb57aff1297c63d362a14c5f6104fbe15c16708a64c059e71806b7bb4a1680a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 23 08:32:23 crc kubenswrapper[4711]: E0123 08:32:23.753482 4711 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-646758c888-k54qf_openshift-nmstate_6697496e-11e9-4b32-8e2f-d485235b0d8a_0(dfb57aff1297c63d362a14c5f6104fbe15c16708a64c059e71806b7bb4a1680a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" Jan 23 08:32:23 crc kubenswrapper[4711]: E0123 08:32:23.753506 4711 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-646758c888-k54qf_openshift-nmstate_6697496e-11e9-4b32-8e2f-d485235b0d8a_0(dfb57aff1297c63d362a14c5f6104fbe15c16708a64c059e71806b7bb4a1680a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" Jan 23 08:32:23 crc kubenswrapper[4711]: E0123 08:32:23.753563 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"nmstate-operator-646758c888-k54qf_openshift-nmstate(6697496e-11e9-4b32-8e2f-d485235b0d8a)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"nmstate-operator-646758c888-k54qf_openshift-nmstate(6697496e-11e9-4b32-8e2f-d485235b0d8a)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-646758c888-k54qf_openshift-nmstate_6697496e-11e9-4b32-8e2f-d485235b0d8a_0(dfb57aff1297c63d362a14c5f6104fbe15c16708a64c059e71806b7bb4a1680a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" podUID="6697496e-11e9-4b32-8e2f-d485235b0d8a" Jan 23 08:32:29 crc kubenswrapper[4711]: I0123 08:32:29.473754 4711 scope.go:117] "RemoveContainer" containerID="bc8d9ec28733822aa97c5e7621962019e13e1d9e7d871480a0272e63ddae78d3" Jan 23 08:32:29 crc kubenswrapper[4711]: E0123 08:32:29.474560 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-vpxkq_openshift-multus(8cc803a0-2626-4444-b4b2-8e9567277d44)\"" pod="openshift-multus/multus-vpxkq" podUID="8cc803a0-2626-4444-b4b2-8e9567277d44" Jan 23 08:32:35 crc kubenswrapper[4711]: I0123 08:32:35.473231 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" Jan 23 08:32:35 crc kubenswrapper[4711]: I0123 08:32:35.478440 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" Jan 23 08:32:35 crc kubenswrapper[4711]: E0123 08:32:35.519828 4711 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-646758c888-k54qf_openshift-nmstate_6697496e-11e9-4b32-8e2f-d485235b0d8a_0(730ed854846cb78a6237b7c3afd45c7bdc9ad20fcad2e225d27ad0307ef42112): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 23 08:32:35 crc kubenswrapper[4711]: E0123 08:32:35.519917 4711 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-646758c888-k54qf_openshift-nmstate_6697496e-11e9-4b32-8e2f-d485235b0d8a_0(730ed854846cb78a6237b7c3afd45c7bdc9ad20fcad2e225d27ad0307ef42112): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" Jan 23 08:32:35 crc kubenswrapper[4711]: E0123 08:32:35.519943 4711 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-646758c888-k54qf_openshift-nmstate_6697496e-11e9-4b32-8e2f-d485235b0d8a_0(730ed854846cb78a6237b7c3afd45c7bdc9ad20fcad2e225d27ad0307ef42112): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" Jan 23 08:32:35 crc kubenswrapper[4711]: E0123 08:32:35.519994 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"nmstate-operator-646758c888-k54qf_openshift-nmstate(6697496e-11e9-4b32-8e2f-d485235b0d8a)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"nmstate-operator-646758c888-k54qf_openshift-nmstate(6697496e-11e9-4b32-8e2f-d485235b0d8a)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-646758c888-k54qf_openshift-nmstate_6697496e-11e9-4b32-8e2f-d485235b0d8a_0(730ed854846cb78a6237b7c3afd45c7bdc9ad20fcad2e225d27ad0307ef42112): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" podUID="6697496e-11e9-4b32-8e2f-d485235b0d8a" Jan 23 08:32:42 crc kubenswrapper[4711]: I0123 08:32:42.473858 4711 scope.go:117] "RemoveContainer" containerID="bc8d9ec28733822aa97c5e7621962019e13e1d9e7d871480a0272e63ddae78d3" Jan 23 08:32:43 crc kubenswrapper[4711]: I0123 08:32:43.151809 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vpxkq_8cc803a0-2626-4444-b4b2-8e9567277d44/kube-multus/2.log" Jan 23 08:32:43 crc kubenswrapper[4711]: I0123 08:32:43.152251 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vpxkq" event={"ID":"8cc803a0-2626-4444-b4b2-8e9567277d44","Type":"ContainerStarted","Data":"84ce72460018b6cc367fe4d2435d65ecdb767a6db2791b4b88480f8e45b5d13a"} Jan 23 08:32:47 crc kubenswrapper[4711]: I0123 08:32:47.475060 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" Jan 23 08:32:47 crc kubenswrapper[4711]: I0123 08:32:47.476262 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" Jan 23 08:32:47 crc kubenswrapper[4711]: I0123 08:32:47.489039 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4hjrl" Jan 23 08:32:49 crc kubenswrapper[4711]: I0123 08:32:49.805566 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-k54qf"] Jan 23 08:32:50 crc kubenswrapper[4711]: I0123 08:32:50.198420 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" event={"ID":"6697496e-11e9-4b32-8e2f-d485235b0d8a","Type":"ContainerStarted","Data":"c91bf8365106809c5b08ea5d00def3dd12bd05a47e3676afade44efb01c1c5db"} Jan 23 08:32:53 crc kubenswrapper[4711]: I0123 08:32:53.214270 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" event={"ID":"6697496e-11e9-4b32-8e2f-d485235b0d8a","Type":"ContainerStarted","Data":"454f4e1809a852f98f7e7fc0e480c97b0b6dca5dbc17eb2cc9e6af9ae89194b7"} Jan 23 08:32:53 crc kubenswrapper[4711]: I0123 08:32:53.236862 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-646758c888-k54qf" podStartSLOduration=31.974990465 podStartE2EDuration="34.236840905s" podCreationTimestamp="2026-01-23 08:32:19 +0000 UTC" firstStartedPulling="2026-01-23 08:32:49.811452368 +0000 UTC m=+755.384408736" lastFinishedPulling="2026-01-23 08:32:52.073302808 +0000 UTC m=+757.646259176" observedRunningTime="2026-01-23 08:32:53.228921483 +0000 UTC m=+758.801877851" watchObservedRunningTime="2026-01-23 08:32:53.236840905 +0000 UTC m=+758.809797273" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.260151 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-nbz56"] Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.261486 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-nbz56" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.263531 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-r6wt6" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.271618 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-bzrxp"] Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.272215 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bzrxp" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.280665 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72dtz\" (UniqueName: \"kubernetes.io/projected/56aed6bb-34ef-4b46-a0e8-a3da8931d069-kube-api-access-72dtz\") pod \"nmstate-metrics-54757c584b-nbz56\" (UID: \"56aed6bb-34ef-4b46-a0e8-a3da8931d069\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-nbz56" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.280734 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e200b62d-4a7a-4726-9650-3ac95e53ba0d-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-bzrxp\" (UID: \"e200b62d-4a7a-4726-9650-3ac95e53ba0d\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bzrxp" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.280822 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zn25z\" (UniqueName: \"kubernetes.io/projected/e200b62d-4a7a-4726-9650-3ac95e53ba0d-kube-api-access-zn25z\") pod \"nmstate-webhook-8474b5b9d8-bzrxp\" (UID: \"e200b62d-4a7a-4726-9650-3ac95e53ba0d\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bzrxp" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.287705 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-nbz56"] Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.287970 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.353821 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-bzrxp"] Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.372658 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-htpp8"] Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.373246 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-htpp8" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.387200 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72dtz\" (UniqueName: \"kubernetes.io/projected/56aed6bb-34ef-4b46-a0e8-a3da8931d069-kube-api-access-72dtz\") pod \"nmstate-metrics-54757c584b-nbz56\" (UID: \"56aed6bb-34ef-4b46-a0e8-a3da8931d069\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-nbz56" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.387280 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e200b62d-4a7a-4726-9650-3ac95e53ba0d-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-bzrxp\" (UID: \"e200b62d-4a7a-4726-9650-3ac95e53ba0d\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bzrxp" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.387332 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zn25z\" (UniqueName: \"kubernetes.io/projected/e200b62d-4a7a-4726-9650-3ac95e53ba0d-kube-api-access-zn25z\") pod \"nmstate-webhook-8474b5b9d8-bzrxp\" (UID: \"e200b62d-4a7a-4726-9650-3ac95e53ba0d\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bzrxp" Jan 23 08:32:54 crc kubenswrapper[4711]: E0123 08:32:54.387757 4711 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Jan 23 08:32:54 crc kubenswrapper[4711]: E0123 08:32:54.387837 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e200b62d-4a7a-4726-9650-3ac95e53ba0d-tls-key-pair podName:e200b62d-4a7a-4726-9650-3ac95e53ba0d nodeName:}" failed. No retries permitted until 2026-01-23 08:32:54.887816781 +0000 UTC m=+760.460773149 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/e200b62d-4a7a-4726-9650-3ac95e53ba0d-tls-key-pair") pod "nmstate-webhook-8474b5b9d8-bzrxp" (UID: "e200b62d-4a7a-4726-9650-3ac95e53ba0d") : secret "openshift-nmstate-webhook" not found Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.432794 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zn25z\" (UniqueName: \"kubernetes.io/projected/e200b62d-4a7a-4726-9650-3ac95e53ba0d-kube-api-access-zn25z\") pod \"nmstate-webhook-8474b5b9d8-bzrxp\" (UID: \"e200b62d-4a7a-4726-9650-3ac95e53ba0d\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bzrxp" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.442379 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72dtz\" (UniqueName: \"kubernetes.io/projected/56aed6bb-34ef-4b46-a0e8-a3da8931d069-kube-api-access-72dtz\") pod \"nmstate-metrics-54757c584b-nbz56\" (UID: \"56aed6bb-34ef-4b46-a0e8-a3da8931d069\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-nbz56" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.477309 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-r5vd9"] Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.477958 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r5vd9" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.481633 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.481841 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-r6nrk" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.481923 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.488055 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t87pc\" (UniqueName: \"kubernetes.io/projected/2e3923e6-c7fb-4a8a-a621-86a8799f4525-kube-api-access-t87pc\") pod \"nmstate-handler-htpp8\" (UID: \"2e3923e6-c7fb-4a8a-a621-86a8799f4525\") " pod="openshift-nmstate/nmstate-handler-htpp8" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.490259 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/2e3923e6-c7fb-4a8a-a621-86a8799f4525-dbus-socket\") pod \"nmstate-handler-htpp8\" (UID: \"2e3923e6-c7fb-4a8a-a621-86a8799f4525\") " pod="openshift-nmstate/nmstate-handler-htpp8" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.490369 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/2e3923e6-c7fb-4a8a-a621-86a8799f4525-ovs-socket\") pod \"nmstate-handler-htpp8\" (UID: \"2e3923e6-c7fb-4a8a-a621-86a8799f4525\") " pod="openshift-nmstate/nmstate-handler-htpp8" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.490461 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/2e3923e6-c7fb-4a8a-a621-86a8799f4525-nmstate-lock\") pod \"nmstate-handler-htpp8\" (UID: \"2e3923e6-c7fb-4a8a-a621-86a8799f4525\") " pod="openshift-nmstate/nmstate-handler-htpp8" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.493530 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-r5vd9"] Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.591784 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/2e3923e6-c7fb-4a8a-a621-86a8799f4525-ovs-socket\") pod \"nmstate-handler-htpp8\" (UID: \"2e3923e6-c7fb-4a8a-a621-86a8799f4525\") " pod="openshift-nmstate/nmstate-handler-htpp8" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.591830 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/2e3923e6-c7fb-4a8a-a621-86a8799f4525-nmstate-lock\") pod \"nmstate-handler-htpp8\" (UID: \"2e3923e6-c7fb-4a8a-a621-86a8799f4525\") " pod="openshift-nmstate/nmstate-handler-htpp8" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.591880 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/27dd60bd-f0ca-4e99-b5ff-70d34f58cf63-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-r5vd9\" (UID: \"27dd60bd-f0ca-4e99-b5ff-70d34f58cf63\") " 
pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r5vd9" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.591913 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/27dd60bd-f0ca-4e99-b5ff-70d34f58cf63-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-r5vd9\" (UID: \"27dd60bd-f0ca-4e99-b5ff-70d34f58cf63\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r5vd9" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.591932 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/2e3923e6-c7fb-4a8a-a621-86a8799f4525-ovs-socket\") pod \"nmstate-handler-htpp8\" (UID: \"2e3923e6-c7fb-4a8a-a621-86a8799f4525\") " pod="openshift-nmstate/nmstate-handler-htpp8" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.591961 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t87pc\" (UniqueName: \"kubernetes.io/projected/2e3923e6-c7fb-4a8a-a621-86a8799f4525-kube-api-access-t87pc\") pod \"nmstate-handler-htpp8\" (UID: \"2e3923e6-c7fb-4a8a-a621-86a8799f4525\") " pod="openshift-nmstate/nmstate-handler-htpp8" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.591963 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/2e3923e6-c7fb-4a8a-a621-86a8799f4525-nmstate-lock\") pod \"nmstate-handler-htpp8\" (UID: \"2e3923e6-c7fb-4a8a-a621-86a8799f4525\") " pod="openshift-nmstate/nmstate-handler-htpp8" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.592057 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrmpq\" (UniqueName: \"kubernetes.io/projected/27dd60bd-f0ca-4e99-b5ff-70d34f58cf63-kube-api-access-rrmpq\") pod \"nmstate-console-plugin-7754f76f8b-r5vd9\" (UID: \"27dd60bd-f0ca-4e99-b5ff-70d34f58cf63\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r5vd9" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.592083 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/2e3923e6-c7fb-4a8a-a621-86a8799f4525-dbus-socket\") pod \"nmstate-handler-htpp8\" (UID: \"2e3923e6-c7fb-4a8a-a621-86a8799f4525\") " pod="openshift-nmstate/nmstate-handler-htpp8" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.592355 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/2e3923e6-c7fb-4a8a-a621-86a8799f4525-dbus-socket\") pod \"nmstate-handler-htpp8\" (UID: \"2e3923e6-c7fb-4a8a-a621-86a8799f4525\") " pod="openshift-nmstate/nmstate-handler-htpp8" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.625261 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t87pc\" (UniqueName: \"kubernetes.io/projected/2e3923e6-c7fb-4a8a-a621-86a8799f4525-kube-api-access-t87pc\") pod \"nmstate-handler-htpp8\" (UID: \"2e3923e6-c7fb-4a8a-a621-86a8799f4525\") " pod="openshift-nmstate/nmstate-handler-htpp8" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.636682 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-nbz56" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.645585 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-75bc6c8444-btrpf"] Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.646215 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-75bc6c8444-btrpf" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.659221 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-75bc6c8444-btrpf"] Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.693382 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lwxg\" (UniqueName: \"kubernetes.io/projected/33be9e33-0ab3-4310-8594-7b4ac5f580d7-kube-api-access-9lwxg\") pod \"console-75bc6c8444-btrpf\" (UID: \"33be9e33-0ab3-4310-8594-7b4ac5f580d7\") " pod="openshift-console/console-75bc6c8444-btrpf" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.693444 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/33be9e33-0ab3-4310-8594-7b4ac5f580d7-service-ca\") pod \"console-75bc6c8444-btrpf\" (UID: \"33be9e33-0ab3-4310-8594-7b4ac5f580d7\") " pod="openshift-console/console-75bc6c8444-btrpf" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.693478 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/33be9e33-0ab3-4310-8594-7b4ac5f580d7-console-oauth-config\") pod \"console-75bc6c8444-btrpf\" (UID: \"33be9e33-0ab3-4310-8594-7b4ac5f580d7\") " pod="openshift-console/console-75bc6c8444-btrpf" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.693526 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/27dd60bd-f0ca-4e99-b5ff-70d34f58cf63-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-r5vd9\" (UID: \"27dd60bd-f0ca-4e99-b5ff-70d34f58cf63\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r5vd9" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.693556 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/27dd60bd-f0ca-4e99-b5ff-70d34f58cf63-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-r5vd9\" (UID: \"27dd60bd-f0ca-4e99-b5ff-70d34f58cf63\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r5vd9" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.693598 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/33be9e33-0ab3-4310-8594-7b4ac5f580d7-oauth-serving-cert\") pod \"console-75bc6c8444-btrpf\" (UID: \"33be9e33-0ab3-4310-8594-7b4ac5f580d7\") " pod="openshift-console/console-75bc6c8444-btrpf" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.693630 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/33be9e33-0ab3-4310-8594-7b4ac5f580d7-trusted-ca-bundle\") pod \"console-75bc6c8444-btrpf\" (UID: \"33be9e33-0ab3-4310-8594-7b4ac5f580d7\") " pod="openshift-console/console-75bc6c8444-btrpf" Jan 23 08:32:54 crc kubenswrapper[4711]: 
Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.693682 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/33be9e33-0ab3-4310-8594-7b4ac5f580d7-console-serving-cert\") pod \"console-75bc6c8444-btrpf\" (UID: \"33be9e33-0ab3-4310-8594-7b4ac5f580d7\") " pod="openshift-console/console-75bc6c8444-btrpf"
Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.693712 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrmpq\" (UniqueName: \"kubernetes.io/projected/27dd60bd-f0ca-4e99-b5ff-70d34f58cf63-kube-api-access-rrmpq\") pod \"nmstate-console-plugin-7754f76f8b-r5vd9\" (UID: \"27dd60bd-f0ca-4e99-b5ff-70d34f58cf63\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r5vd9"
Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.694629 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/27dd60bd-f0ca-4e99-b5ff-70d34f58cf63-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-r5vd9\" (UID: \"27dd60bd-f0ca-4e99-b5ff-70d34f58cf63\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r5vd9"
Jan 23 08:32:54 crc kubenswrapper[4711]: E0123 08:32:54.694722 4711 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found
Jan 23 08:32:54 crc kubenswrapper[4711]: E0123 08:32:54.694787 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27dd60bd-f0ca-4e99-b5ff-70d34f58cf63-plugin-serving-cert podName:27dd60bd-f0ca-4e99-b5ff-70d34f58cf63 nodeName:}" failed. No retries permitted until 2026-01-23 08:32:55.194773256 +0000 UTC m=+760.767729614 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/27dd60bd-f0ca-4e99-b5ff-70d34f58cf63-plugin-serving-cert") pod "nmstate-console-plugin-7754f76f8b-r5vd9" (UID: "27dd60bd-f0ca-4e99-b5ff-70d34f58cf63") : secret "plugin-serving-cert" not found
Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.701074 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-htpp8"
Need to start a new one" pod="openshift-nmstate/nmstate-handler-htpp8" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.716372 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrmpq\" (UniqueName: \"kubernetes.io/projected/27dd60bd-f0ca-4e99-b5ff-70d34f58cf63-kube-api-access-rrmpq\") pod \"nmstate-console-plugin-7754f76f8b-r5vd9\" (UID: \"27dd60bd-f0ca-4e99-b5ff-70d34f58cf63\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r5vd9" Jan 23 08:32:54 crc kubenswrapper[4711]: W0123 08:32:54.723335 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2e3923e6_c7fb_4a8a_a621_86a8799f4525.slice/crio-0ad8bbc06e392a24df0355af80138c4e51ea0c5ccbab44f319f75b67b6806ccb WatchSource:0}: Error finding container 0ad8bbc06e392a24df0355af80138c4e51ea0c5ccbab44f319f75b67b6806ccb: Status 404 returned error can't find the container with id 0ad8bbc06e392a24df0355af80138c4e51ea0c5ccbab44f319f75b67b6806ccb Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.794721 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lwxg\" (UniqueName: \"kubernetes.io/projected/33be9e33-0ab3-4310-8594-7b4ac5f580d7-kube-api-access-9lwxg\") pod \"console-75bc6c8444-btrpf\" (UID: \"33be9e33-0ab3-4310-8594-7b4ac5f580d7\") " pod="openshift-console/console-75bc6c8444-btrpf" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.794799 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/33be9e33-0ab3-4310-8594-7b4ac5f580d7-service-ca\") pod \"console-75bc6c8444-btrpf\" (UID: \"33be9e33-0ab3-4310-8594-7b4ac5f580d7\") " pod="openshift-console/console-75bc6c8444-btrpf" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.794841 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/33be9e33-0ab3-4310-8594-7b4ac5f580d7-console-oauth-config\") pod \"console-75bc6c8444-btrpf\" (UID: \"33be9e33-0ab3-4310-8594-7b4ac5f580d7\") " pod="openshift-console/console-75bc6c8444-btrpf" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.794890 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/33be9e33-0ab3-4310-8594-7b4ac5f580d7-oauth-serving-cert\") pod \"console-75bc6c8444-btrpf\" (UID: \"33be9e33-0ab3-4310-8594-7b4ac5f580d7\") " pod="openshift-console/console-75bc6c8444-btrpf" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.794929 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/33be9e33-0ab3-4310-8594-7b4ac5f580d7-trusted-ca-bundle\") pod \"console-75bc6c8444-btrpf\" (UID: \"33be9e33-0ab3-4310-8594-7b4ac5f580d7\") " pod="openshift-console/console-75bc6c8444-btrpf" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.794966 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/33be9e33-0ab3-4310-8594-7b4ac5f580d7-console-config\") pod \"console-75bc6c8444-btrpf\" (UID: \"33be9e33-0ab3-4310-8594-7b4ac5f580d7\") " pod="openshift-console/console-75bc6c8444-btrpf" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.795000 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/33be9e33-0ab3-4310-8594-7b4ac5f580d7-console-serving-cert\") pod \"console-75bc6c8444-btrpf\" (UID: \"33be9e33-0ab3-4310-8594-7b4ac5f580d7\") " pod="openshift-console/console-75bc6c8444-btrpf" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.796653 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/33be9e33-0ab3-4310-8594-7b4ac5f580d7-service-ca\") pod \"console-75bc6c8444-btrpf\" (UID: \"33be9e33-0ab3-4310-8594-7b4ac5f580d7\") " pod="openshift-console/console-75bc6c8444-btrpf" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.796957 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/33be9e33-0ab3-4310-8594-7b4ac5f580d7-oauth-serving-cert\") pod \"console-75bc6c8444-btrpf\" (UID: \"33be9e33-0ab3-4310-8594-7b4ac5f580d7\") " pod="openshift-console/console-75bc6c8444-btrpf" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.798126 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/33be9e33-0ab3-4310-8594-7b4ac5f580d7-console-config\") pod \"console-75bc6c8444-btrpf\" (UID: \"33be9e33-0ab3-4310-8594-7b4ac5f580d7\") " pod="openshift-console/console-75bc6c8444-btrpf" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.800441 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/33be9e33-0ab3-4310-8594-7b4ac5f580d7-console-oauth-config\") pod \"console-75bc6c8444-btrpf\" (UID: \"33be9e33-0ab3-4310-8594-7b4ac5f580d7\") " pod="openshift-console/console-75bc6c8444-btrpf" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.800499 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/33be9e33-0ab3-4310-8594-7b4ac5f580d7-console-serving-cert\") pod \"console-75bc6c8444-btrpf\" (UID: \"33be9e33-0ab3-4310-8594-7b4ac5f580d7\") " pod="openshift-console/console-75bc6c8444-btrpf" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.802332 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/33be9e33-0ab3-4310-8594-7b4ac5f580d7-trusted-ca-bundle\") pod \"console-75bc6c8444-btrpf\" (UID: \"33be9e33-0ab3-4310-8594-7b4ac5f580d7\") " pod="openshift-console/console-75bc6c8444-btrpf" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.812575 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lwxg\" (UniqueName: \"kubernetes.io/projected/33be9e33-0ab3-4310-8594-7b4ac5f580d7-kube-api-access-9lwxg\") pod \"console-75bc6c8444-btrpf\" (UID: \"33be9e33-0ab3-4310-8594-7b4ac5f580d7\") " pod="openshift-console/console-75bc6c8444-btrpf" Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.835095 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-nbz56"] Jan 23 08:32:54 crc kubenswrapper[4711]: W0123 08:32:54.839316 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56aed6bb_34ef_4b46_a0e8_a3da8931d069.slice/crio-744771bfa02189a718a6e76571a1c38021f168972907a7d71e96eae37a23ff6b WatchSource:0}: Error finding container 744771bfa02189a718a6e76571a1c38021f168972907a7d71e96eae37a23ff6b: 
Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.896185 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e200b62d-4a7a-4726-9650-3ac95e53ba0d-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-bzrxp\" (UID: \"e200b62d-4a7a-4726-9650-3ac95e53ba0d\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bzrxp"
Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.900169 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e200b62d-4a7a-4726-9650-3ac95e53ba0d-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-bzrxp\" (UID: \"e200b62d-4a7a-4726-9650-3ac95e53ba0d\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bzrxp"
Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.958126 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bzrxp"
Jan 23 08:32:54 crc kubenswrapper[4711]: I0123 08:32:54.994732 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-75bc6c8444-btrpf"
Jan 23 08:32:55 crc kubenswrapper[4711]: I0123 08:32:55.199821 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/27dd60bd-f0ca-4e99-b5ff-70d34f58cf63-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-r5vd9\" (UID: \"27dd60bd-f0ca-4e99-b5ff-70d34f58cf63\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r5vd9"
Jan 23 08:32:55 crc kubenswrapper[4711]: I0123 08:32:55.203713 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/27dd60bd-f0ca-4e99-b5ff-70d34f58cf63-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-r5vd9\" (UID: \"27dd60bd-f0ca-4e99-b5ff-70d34f58cf63\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r5vd9"
Jan 23 08:32:55 crc kubenswrapper[4711]: I0123 08:32:55.214501 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-75bc6c8444-btrpf"]
Jan 23 08:32:55 crc kubenswrapper[4711]: W0123 08:32:55.219538 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod33be9e33_0ab3_4310_8594_7b4ac5f580d7.slice/crio-926c0980d8680cd1d5abdfe19946a59d947f727f20e8ba1d06d8914f600df32d WatchSource:0}: Error finding container 926c0980d8680cd1d5abdfe19946a59d947f727f20e8ba1d06d8914f600df32d: Status 404 returned error can't find the container with id 926c0980d8680cd1d5abdfe19946a59d947f727f20e8ba1d06d8914f600df32d
Jan 23 08:32:55 crc kubenswrapper[4711]: I0123 08:32:55.223839 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-htpp8" event={"ID":"2e3923e6-c7fb-4a8a-a621-86a8799f4525","Type":"ContainerStarted","Data":"0ad8bbc06e392a24df0355af80138c4e51ea0c5ccbab44f319f75b67b6806ccb"}
Jan 23 08:32:55 crc kubenswrapper[4711]: I0123 08:32:55.225325 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-nbz56" event={"ID":"56aed6bb-34ef-4b46-a0e8-a3da8931d069","Type":"ContainerStarted","Data":"744771bfa02189a718a6e76571a1c38021f168972907a7d71e96eae37a23ff6b"}
Jan 23 08:32:55 crc kubenswrapper[4711]: I0123 08:32:55.373102 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-bzrxp"]
4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-bzrxp"] Jan 23 08:32:55 crc kubenswrapper[4711]: I0123 08:32:55.391453 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r5vd9" Jan 23 08:32:55 crc kubenswrapper[4711]: I0123 08:32:55.614495 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-r5vd9"] Jan 23 08:32:56 crc kubenswrapper[4711]: I0123 08:32:56.231846 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r5vd9" event={"ID":"27dd60bd-f0ca-4e99-b5ff-70d34f58cf63","Type":"ContainerStarted","Data":"2f0afa4b7f2edfa373c94f6fbbdda393a2e8a13024304ce8f295d81da6a026dc"} Jan 23 08:32:56 crc kubenswrapper[4711]: I0123 08:32:56.233257 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bzrxp" event={"ID":"e200b62d-4a7a-4726-9650-3ac95e53ba0d","Type":"ContainerStarted","Data":"3c997fb941e9b883111612127bd92d1978710b57ee9f03699581a7a389d628c2"} Jan 23 08:32:56 crc kubenswrapper[4711]: I0123 08:32:56.234753 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-75bc6c8444-btrpf" event={"ID":"33be9e33-0ab3-4310-8594-7b4ac5f580d7","Type":"ContainerStarted","Data":"926c0980d8680cd1d5abdfe19946a59d947f727f20e8ba1d06d8914f600df32d"} Jan 23 08:32:59 crc kubenswrapper[4711]: I0123 08:32:59.254438 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-75bc6c8444-btrpf" event={"ID":"33be9e33-0ab3-4310-8594-7b4ac5f580d7","Type":"ContainerStarted","Data":"67c48d371afd7f82d57357f6a8c7422d511ac9215b7ec8df1640487e49ee0c6f"} Jan 23 08:33:01 crc kubenswrapper[4711]: I0123 08:33:01.290130 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-75bc6c8444-btrpf" podStartSLOduration=7.29011297 podStartE2EDuration="7.29011297s" podCreationTimestamp="2026-01-23 08:32:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:33:01.286404205 +0000 UTC m=+766.859360593" watchObservedRunningTime="2026-01-23 08:33:01.29011297 +0000 UTC m=+766.863069338" Jan 23 08:33:04 crc kubenswrapper[4711]: I0123 08:33:04.294850 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bzrxp" event={"ID":"e200b62d-4a7a-4726-9650-3ac95e53ba0d","Type":"ContainerStarted","Data":"3b20cff4d0c46e2ac1ea3a1d42fae4a3b53a584b8c52fdae38b426a4a7edda82"} Jan 23 08:33:04 crc kubenswrapper[4711]: I0123 08:33:04.295944 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bzrxp" Jan 23 08:33:04 crc kubenswrapper[4711]: I0123 08:33:04.296908 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-nbz56" event={"ID":"56aed6bb-34ef-4b46-a0e8-a3da8931d069","Type":"ContainerStarted","Data":"8c37cb9767f23be2d5650ba52e12158e8993afa71fd138e0cfe62ffa5051e1eb"} Jan 23 08:33:04 crc kubenswrapper[4711]: I0123 08:33:04.301732 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r5vd9" event={"ID":"27dd60bd-f0ca-4e99-b5ff-70d34f58cf63","Type":"ContainerStarted","Data":"4224d9cbd6d664c6e0532c56c0773808c5bf73173f10b0118e387b716e25fbe0"} Jan 23 
Jan 23 08:33:04 crc kubenswrapper[4711]: I0123 08:33:04.304621 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-htpp8"
Jan 23 08:33:04 crc kubenswrapper[4711]: I0123 08:33:04.315908 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bzrxp" podStartSLOduration=2.249704223 podStartE2EDuration="10.315888747s" podCreationTimestamp="2026-01-23 08:32:54 +0000 UTC" firstStartedPulling="2026-01-23 08:32:55.382762906 +0000 UTC m=+760.955719274" lastFinishedPulling="2026-01-23 08:33:03.44894743 +0000 UTC m=+769.021903798" observedRunningTime="2026-01-23 08:33:04.309580225 +0000 UTC m=+769.882536593" watchObservedRunningTime="2026-01-23 08:33:04.315888747 +0000 UTC m=+769.888845115"
Jan 23 08:33:04 crc kubenswrapper[4711]: I0123 08:33:04.353942 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-htpp8" podStartSLOduration=1.640953705 podStartE2EDuration="10.353924327s" podCreationTimestamp="2026-01-23 08:32:54 +0000 UTC" firstStartedPulling="2026-01-23 08:32:54.725780267 +0000 UTC m=+760.298736635" lastFinishedPulling="2026-01-23 08:33:03.438750889 +0000 UTC m=+769.011707257" observedRunningTime="2026-01-23 08:33:04.351377732 +0000 UTC m=+769.924334120" watchObservedRunningTime="2026-01-23 08:33:04.353924327 +0000 UTC m=+769.926880695"
Jan 23 08:33:04 crc kubenswrapper[4711]: I0123 08:33:04.356497 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-r5vd9" podStartSLOduration=2.541649164 podStartE2EDuration="10.356487603s" podCreationTimestamp="2026-01-23 08:32:54 +0000 UTC" firstStartedPulling="2026-01-23 08:32:55.622071893 +0000 UTC m=+761.195028261" lastFinishedPulling="2026-01-23 08:33:03.436910322 +0000 UTC m=+769.009866700" observedRunningTime="2026-01-23 08:33:04.328784026 +0000 UTC m=+769.901740404" watchObservedRunningTime="2026-01-23 08:33:04.356487603 +0000 UTC m=+769.929443971"
Jan 23 08:33:04 crc kubenswrapper[4711]: I0123 08:33:04.995040 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-75bc6c8444-btrpf"
Jan 23 08:33:04 crc kubenswrapper[4711]: I0123 08:33:04.995350 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-75bc6c8444-btrpf"
Jan 23 08:33:05 crc kubenswrapper[4711]: I0123 08:33:05.001202 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-75bc6c8444-btrpf"
Jan 23 08:33:05 crc kubenswrapper[4711]: I0123 08:33:05.316167 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-75bc6c8444-btrpf"
Jan 23 08:33:05 crc kubenswrapper[4711]: I0123 08:33:05.372186 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-sk8zj"]
Jan 23 08:33:06 crc kubenswrapper[4711]: I0123 08:33:06.139477 4711 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Jan 23 08:33:07 crc kubenswrapper[4711]: I0123 08:33:07.328082 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-nbz56" event={"ID":"56aed6bb-34ef-4b46-a0e8-a3da8931d069","Type":"ContainerStarted","Data":"a1f7ee1079722faf8f960046e06ff3b3b134f53723c72e2e7372852d45f3ec5c"}
4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-nbz56" event={"ID":"56aed6bb-34ef-4b46-a0e8-a3da8931d069","Type":"ContainerStarted","Data":"a1f7ee1079722faf8f960046e06ff3b3b134f53723c72e2e7372852d45f3ec5c"} Jan 23 08:33:07 crc kubenswrapper[4711]: I0123 08:33:07.348964 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-54757c584b-nbz56" podStartSLOduration=1.2861375 podStartE2EDuration="13.348948939s" podCreationTimestamp="2026-01-23 08:32:54 +0000 UTC" firstStartedPulling="2026-01-23 08:32:54.841379528 +0000 UTC m=+760.414335896" lastFinishedPulling="2026-01-23 08:33:06.904190977 +0000 UTC m=+772.477147335" observedRunningTime="2026-01-23 08:33:07.348415846 +0000 UTC m=+772.921372224" watchObservedRunningTime="2026-01-23 08:33:07.348948939 +0000 UTC m=+772.921905307" Jan 23 08:33:09 crc kubenswrapper[4711]: I0123 08:33:09.724777 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-htpp8" Jan 23 08:33:14 crc kubenswrapper[4711]: I0123 08:33:14.968196 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-bzrxp" Jan 23 08:33:25 crc kubenswrapper[4711]: I0123 08:33:25.994268 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:33:25 crc kubenswrapper[4711]: I0123 08:33:25.994907 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:33:27 crc kubenswrapper[4711]: I0123 08:33:27.633381 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj"] Jan 23 08:33:27 crc kubenswrapper[4711]: I0123 08:33:27.634567 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj" Jan 23 08:33:27 crc kubenswrapper[4711]: I0123 08:33:27.637491 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 23 08:33:27 crc kubenswrapper[4711]: I0123 08:33:27.656403 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj"] Jan 23 08:33:27 crc kubenswrapper[4711]: I0123 08:33:27.737414 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khhgd\" (UniqueName: \"kubernetes.io/projected/0e77f40d-e837-46f6-9a56-df9a7c911bfb-kube-api-access-khhgd\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj\" (UID: \"0e77f40d-e837-46f6-9a56-df9a7c911bfb\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj" Jan 23 08:33:27 crc kubenswrapper[4711]: I0123 08:33:27.737769 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0e77f40d-e837-46f6-9a56-df9a7c911bfb-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj\" (UID: \"0e77f40d-e837-46f6-9a56-df9a7c911bfb\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj" Jan 23 08:33:27 crc kubenswrapper[4711]: I0123 08:33:27.737874 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0e77f40d-e837-46f6-9a56-df9a7c911bfb-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj\" (UID: \"0e77f40d-e837-46f6-9a56-df9a7c911bfb\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj" Jan 23 08:33:27 crc kubenswrapper[4711]: I0123 08:33:27.839758 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0e77f40d-e837-46f6-9a56-df9a7c911bfb-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj\" (UID: \"0e77f40d-e837-46f6-9a56-df9a7c911bfb\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj" Jan 23 08:33:27 crc kubenswrapper[4711]: I0123 08:33:27.839846 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0e77f40d-e837-46f6-9a56-df9a7c911bfb-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj\" (UID: \"0e77f40d-e837-46f6-9a56-df9a7c911bfb\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj" Jan 23 08:33:27 crc kubenswrapper[4711]: I0123 08:33:27.839931 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khhgd\" (UniqueName: \"kubernetes.io/projected/0e77f40d-e837-46f6-9a56-df9a7c911bfb-kube-api-access-khhgd\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj\" (UID: \"0e77f40d-e837-46f6-9a56-df9a7c911bfb\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj" Jan 23 08:33:27 crc kubenswrapper[4711]: I0123 08:33:27.841040 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/0e77f40d-e837-46f6-9a56-df9a7c911bfb-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj\" (UID: \"0e77f40d-e837-46f6-9a56-df9a7c911bfb\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj" Jan 23 08:33:27 crc kubenswrapper[4711]: I0123 08:33:27.841074 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0e77f40d-e837-46f6-9a56-df9a7c911bfb-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj\" (UID: \"0e77f40d-e837-46f6-9a56-df9a7c911bfb\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj" Jan 23 08:33:27 crc kubenswrapper[4711]: I0123 08:33:27.874914 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khhgd\" (UniqueName: \"kubernetes.io/projected/0e77f40d-e837-46f6-9a56-df9a7c911bfb-kube-api-access-khhgd\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj\" (UID: \"0e77f40d-e837-46f6-9a56-df9a7c911bfb\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj" Jan 23 08:33:27 crc kubenswrapper[4711]: I0123 08:33:27.977103 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj" Jan 23 08:33:28 crc kubenswrapper[4711]: I0123 08:33:28.421982 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj"] Jan 23 08:33:28 crc kubenswrapper[4711]: I0123 08:33:28.461533 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj" event={"ID":"0e77f40d-e837-46f6-9a56-df9a7c911bfb","Type":"ContainerStarted","Data":"925e23579df123cc3674e1c2068628ffdcec51fb67593b94e9983d8e96cd14dc"} Jan 23 08:33:29 crc kubenswrapper[4711]: I0123 08:33:29.467737 4711 generic.go:334] "Generic (PLEG): container finished" podID="0e77f40d-e837-46f6-9a56-df9a7c911bfb" containerID="7eadf14832eb8f69ba2ddf0129d5f5554e0db5b481901dc2b22732a25dc6d377" exitCode=0 Jan 23 08:33:29 crc kubenswrapper[4711]: I0123 08:33:29.467801 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj" event={"ID":"0e77f40d-e837-46f6-9a56-df9a7c911bfb","Type":"ContainerDied","Data":"7eadf14832eb8f69ba2ddf0129d5f5554e0db5b481901dc2b22732a25dc6d377"} Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.408223 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-sk8zj" podUID="a7e00bfd-844d-4264-aff6-d2bdb6673084" containerName="console" containerID="cri-o://9598aea1389a4771d9f37145375b3dd00159e2c7f558df2c808551163d20fc56" gracePeriod=15 Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.780001 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-sk8zj_a7e00bfd-844d-4264-aff6-d2bdb6673084/console/0.log" Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.780362 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.885579 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-trusted-ca-bundle\") pod \"a7e00bfd-844d-4264-aff6-d2bdb6673084\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.885670 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a7e00bfd-844d-4264-aff6-d2bdb6673084-console-oauth-config\") pod \"a7e00bfd-844d-4264-aff6-d2bdb6673084\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.885715 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-oauth-serving-cert\") pod \"a7e00bfd-844d-4264-aff6-d2bdb6673084\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.885749 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pzpw\" (UniqueName: \"kubernetes.io/projected/a7e00bfd-844d-4264-aff6-d2bdb6673084-kube-api-access-4pzpw\") pod \"a7e00bfd-844d-4264-aff6-d2bdb6673084\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.885813 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a7e00bfd-844d-4264-aff6-d2bdb6673084-console-serving-cert\") pod \"a7e00bfd-844d-4264-aff6-d2bdb6673084\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.885868 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-console-config\") pod \"a7e00bfd-844d-4264-aff6-d2bdb6673084\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.885965 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-service-ca\") pod \"a7e00bfd-844d-4264-aff6-d2bdb6673084\" (UID: \"a7e00bfd-844d-4264-aff6-d2bdb6673084\") " Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.887108 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "a7e00bfd-844d-4264-aff6-d2bdb6673084" (UID: "a7e00bfd-844d-4264-aff6-d2bdb6673084"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.887216 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-service-ca" (OuterVolumeSpecName: "service-ca") pod "a7e00bfd-844d-4264-aff6-d2bdb6673084" (UID: "a7e00bfd-844d-4264-aff6-d2bdb6673084"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.887260 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-console-config" (OuterVolumeSpecName: "console-config") pod "a7e00bfd-844d-4264-aff6-d2bdb6673084" (UID: "a7e00bfd-844d-4264-aff6-d2bdb6673084"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.887326 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "a7e00bfd-844d-4264-aff6-d2bdb6673084" (UID: "a7e00bfd-844d-4264-aff6-d2bdb6673084"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.887438 4711 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-service-ca\") on node \"crc\" DevicePath \"\"" Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.887464 4711 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.887478 4711 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.887490 4711 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a7e00bfd-844d-4264-aff6-d2bdb6673084-console-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.893584 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7e00bfd-844d-4264-aff6-d2bdb6673084-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "a7e00bfd-844d-4264-aff6-d2bdb6673084" (UID: "a7e00bfd-844d-4264-aff6-d2bdb6673084"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.894054 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7e00bfd-844d-4264-aff6-d2bdb6673084-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "a7e00bfd-844d-4264-aff6-d2bdb6673084" (UID: "a7e00bfd-844d-4264-aff6-d2bdb6673084"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.895044 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7e00bfd-844d-4264-aff6-d2bdb6673084-kube-api-access-4pzpw" (OuterVolumeSpecName: "kube-api-access-4pzpw") pod "a7e00bfd-844d-4264-aff6-d2bdb6673084" (UID: "a7e00bfd-844d-4264-aff6-d2bdb6673084"). InnerVolumeSpecName "kube-api-access-4pzpw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.989209 4711 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a7e00bfd-844d-4264-aff6-d2bdb6673084-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.989268 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pzpw\" (UniqueName: \"kubernetes.io/projected/a7e00bfd-844d-4264-aff6-d2bdb6673084-kube-api-access-4pzpw\") on node \"crc\" DevicePath \"\"" Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.989290 4711 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a7e00bfd-844d-4264-aff6-d2bdb6673084-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.990850 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7rwmz"] Jan 23 08:33:30 crc kubenswrapper[4711]: E0123 08:33:30.991149 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7e00bfd-844d-4264-aff6-d2bdb6673084" containerName="console" Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.991168 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7e00bfd-844d-4264-aff6-d2bdb6673084" containerName="console" Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.991409 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7e00bfd-844d-4264-aff6-d2bdb6673084" containerName="console" Jan 23 08:33:30 crc kubenswrapper[4711]: I0123 08:33:30.995059 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7rwmz" Jan 23 08:33:31 crc kubenswrapper[4711]: I0123 08:33:31.009347 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7rwmz"] Jan 23 08:33:31 crc kubenswrapper[4711]: I0123 08:33:31.090238 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pctqr\" (UniqueName: \"kubernetes.io/projected/8ef510b2-50a4-4def-a7ba-0195d03ce10a-kube-api-access-pctqr\") pod \"redhat-operators-7rwmz\" (UID: \"8ef510b2-50a4-4def-a7ba-0195d03ce10a\") " pod="openshift-marketplace/redhat-operators-7rwmz" Jan 23 08:33:31 crc kubenswrapper[4711]: I0123 08:33:31.090357 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ef510b2-50a4-4def-a7ba-0195d03ce10a-utilities\") pod \"redhat-operators-7rwmz\" (UID: \"8ef510b2-50a4-4def-a7ba-0195d03ce10a\") " pod="openshift-marketplace/redhat-operators-7rwmz" Jan 23 08:33:31 crc kubenswrapper[4711]: I0123 08:33:31.090481 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ef510b2-50a4-4def-a7ba-0195d03ce10a-catalog-content\") pod \"redhat-operators-7rwmz\" (UID: \"8ef510b2-50a4-4def-a7ba-0195d03ce10a\") " pod="openshift-marketplace/redhat-operators-7rwmz" Jan 23 08:33:31 crc kubenswrapper[4711]: I0123 08:33:31.192023 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ef510b2-50a4-4def-a7ba-0195d03ce10a-catalog-content\") pod \"redhat-operators-7rwmz\" (UID: 
\"8ef510b2-50a4-4def-a7ba-0195d03ce10a\") " pod="openshift-marketplace/redhat-operators-7rwmz" Jan 23 08:33:31 crc kubenswrapper[4711]: I0123 08:33:31.192078 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pctqr\" (UniqueName: \"kubernetes.io/projected/8ef510b2-50a4-4def-a7ba-0195d03ce10a-kube-api-access-pctqr\") pod \"redhat-operators-7rwmz\" (UID: \"8ef510b2-50a4-4def-a7ba-0195d03ce10a\") " pod="openshift-marketplace/redhat-operators-7rwmz" Jan 23 08:33:31 crc kubenswrapper[4711]: I0123 08:33:31.192121 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ef510b2-50a4-4def-a7ba-0195d03ce10a-utilities\") pod \"redhat-operators-7rwmz\" (UID: \"8ef510b2-50a4-4def-a7ba-0195d03ce10a\") " pod="openshift-marketplace/redhat-operators-7rwmz" Jan 23 08:33:31 crc kubenswrapper[4711]: I0123 08:33:31.192782 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ef510b2-50a4-4def-a7ba-0195d03ce10a-catalog-content\") pod \"redhat-operators-7rwmz\" (UID: \"8ef510b2-50a4-4def-a7ba-0195d03ce10a\") " pod="openshift-marketplace/redhat-operators-7rwmz" Jan 23 08:33:31 crc kubenswrapper[4711]: I0123 08:33:31.192796 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ef510b2-50a4-4def-a7ba-0195d03ce10a-utilities\") pod \"redhat-operators-7rwmz\" (UID: \"8ef510b2-50a4-4def-a7ba-0195d03ce10a\") " pod="openshift-marketplace/redhat-operators-7rwmz" Jan 23 08:33:31 crc kubenswrapper[4711]: I0123 08:33:31.212118 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pctqr\" (UniqueName: \"kubernetes.io/projected/8ef510b2-50a4-4def-a7ba-0195d03ce10a-kube-api-access-pctqr\") pod \"redhat-operators-7rwmz\" (UID: \"8ef510b2-50a4-4def-a7ba-0195d03ce10a\") " pod="openshift-marketplace/redhat-operators-7rwmz" Jan 23 08:33:31 crc kubenswrapper[4711]: I0123 08:33:31.318439 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7rwmz" Jan 23 08:33:31 crc kubenswrapper[4711]: I0123 08:33:31.490721 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-sk8zj_a7e00bfd-844d-4264-aff6-d2bdb6673084/console/0.log" Jan 23 08:33:31 crc kubenswrapper[4711]: I0123 08:33:31.491140 4711 generic.go:334] "Generic (PLEG): container finished" podID="a7e00bfd-844d-4264-aff6-d2bdb6673084" containerID="9598aea1389a4771d9f37145375b3dd00159e2c7f558df2c808551163d20fc56" exitCode=2 Jan 23 08:33:31 crc kubenswrapper[4711]: I0123 08:33:31.491177 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-sk8zj" event={"ID":"a7e00bfd-844d-4264-aff6-d2bdb6673084","Type":"ContainerDied","Data":"9598aea1389a4771d9f37145375b3dd00159e2c7f558df2c808551163d20fc56"} Jan 23 08:33:31 crc kubenswrapper[4711]: I0123 08:33:31.491217 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-sk8zj" Jan 23 08:33:31 crc kubenswrapper[4711]: I0123 08:33:31.491233 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-sk8zj" event={"ID":"a7e00bfd-844d-4264-aff6-d2bdb6673084","Type":"ContainerDied","Data":"01efb0ced0d4fb94c7b6118295641b3d846760da66ddf13b1154229b1bd7afa0"} Jan 23 08:33:31 crc kubenswrapper[4711]: I0123 08:33:31.491256 4711 scope.go:117] "RemoveContainer" containerID="9598aea1389a4771d9f37145375b3dd00159e2c7f558df2c808551163d20fc56" Jan 23 08:33:31 crc kubenswrapper[4711]: I0123 08:33:31.539473 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-sk8zj"] Jan 23 08:33:31 crc kubenswrapper[4711]: I0123 08:33:31.543829 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-sk8zj"] Jan 23 08:33:31 crc kubenswrapper[4711]: I0123 08:33:31.565225 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7rwmz"] Jan 23 08:33:33 crc kubenswrapper[4711]: I0123 08:33:33.481874 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7e00bfd-844d-4264-aff6-d2bdb6673084" path="/var/lib/kubelet/pods/a7e00bfd-844d-4264-aff6-d2bdb6673084/volumes" Jan 23 08:33:35 crc kubenswrapper[4711]: I0123 08:33:35.583743 4711 scope.go:117] "RemoveContainer" containerID="9598aea1389a4771d9f37145375b3dd00159e2c7f558df2c808551163d20fc56" Jan 23 08:33:35 crc kubenswrapper[4711]: E0123 08:33:35.585721 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9598aea1389a4771d9f37145375b3dd00159e2c7f558df2c808551163d20fc56\": container with ID starting with 9598aea1389a4771d9f37145375b3dd00159e2c7f558df2c808551163d20fc56 not found: ID does not exist" containerID="9598aea1389a4771d9f37145375b3dd00159e2c7f558df2c808551163d20fc56" Jan 23 08:33:35 crc kubenswrapper[4711]: I0123 08:33:35.585805 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9598aea1389a4771d9f37145375b3dd00159e2c7f558df2c808551163d20fc56"} err="failed to get container status \"9598aea1389a4771d9f37145375b3dd00159e2c7f558df2c808551163d20fc56\": rpc error: code = NotFound desc = could not find container \"9598aea1389a4771d9f37145375b3dd00159e2c7f558df2c808551163d20fc56\": container with ID starting with 9598aea1389a4771d9f37145375b3dd00159e2c7f558df2c808551163d20fc56 not found: ID does not exist" Jan 23 08:33:36 crc kubenswrapper[4711]: I0123 08:33:36.525113 4711 generic.go:334] "Generic (PLEG): container finished" podID="8ef510b2-50a4-4def-a7ba-0195d03ce10a" containerID="2c337100d931921a140e8566a428b880593979db969f75b804c318ae338fec40" exitCode=0 Jan 23 08:33:36 crc kubenswrapper[4711]: I0123 08:33:36.525201 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7rwmz" event={"ID":"8ef510b2-50a4-4def-a7ba-0195d03ce10a","Type":"ContainerDied","Data":"2c337100d931921a140e8566a428b880593979db969f75b804c318ae338fec40"} Jan 23 08:33:36 crc kubenswrapper[4711]: I0123 08:33:36.525558 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7rwmz" event={"ID":"8ef510b2-50a4-4def-a7ba-0195d03ce10a","Type":"ContainerStarted","Data":"3494852def5c5b9c5e93c7ef5f6d062da4970f3b822731285f8481b0dbaac70b"} Jan 23 08:33:37 crc kubenswrapper[4711]: I0123 08:33:37.535998 4711 generic.go:334] "Generic (PLEG): 
container finished" podID="0e77f40d-e837-46f6-9a56-df9a7c911bfb" containerID="e8387120bffe38e70bb7191290e670a501255243c1499654bd80de564e290630" exitCode=0 Jan 23 08:33:37 crc kubenswrapper[4711]: I0123 08:33:37.536092 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj" event={"ID":"0e77f40d-e837-46f6-9a56-df9a7c911bfb","Type":"ContainerDied","Data":"e8387120bffe38e70bb7191290e670a501255243c1499654bd80de564e290630"} Jan 23 08:33:38 crc kubenswrapper[4711]: I0123 08:33:38.546334 4711 generic.go:334] "Generic (PLEG): container finished" podID="0e77f40d-e837-46f6-9a56-df9a7c911bfb" containerID="8b7636f4f09d4ce747c440931006f35f0e0c71886ac6c1c243d902e0fa7214c9" exitCode=0 Jan 23 08:33:38 crc kubenswrapper[4711]: I0123 08:33:38.546417 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj" event={"ID":"0e77f40d-e837-46f6-9a56-df9a7c911bfb","Type":"ContainerDied","Data":"8b7636f4f09d4ce747c440931006f35f0e0c71886ac6c1c243d902e0fa7214c9"} Jan 23 08:33:38 crc kubenswrapper[4711]: I0123 08:33:38.549866 4711 generic.go:334] "Generic (PLEG): container finished" podID="8ef510b2-50a4-4def-a7ba-0195d03ce10a" containerID="3b1727131c133052fdef6721cb6a3e71eb03cb6bfec2f7139784fde752b8b258" exitCode=0 Jan 23 08:33:38 crc kubenswrapper[4711]: I0123 08:33:38.549947 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7rwmz" event={"ID":"8ef510b2-50a4-4def-a7ba-0195d03ce10a","Type":"ContainerDied","Data":"3b1727131c133052fdef6721cb6a3e71eb03cb6bfec2f7139784fde752b8b258"} Jan 23 08:33:39 crc kubenswrapper[4711]: I0123 08:33:39.558117 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7rwmz" event={"ID":"8ef510b2-50a4-4def-a7ba-0195d03ce10a","Type":"ContainerStarted","Data":"91452c78b753604a11be9c17ae20960c9142e596ade40c9af132c3797970c8f6"} Jan 23 08:33:39 crc kubenswrapper[4711]: I0123 08:33:39.828962 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj" Jan 23 08:33:39 crc kubenswrapper[4711]: I0123 08:33:39.850313 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7rwmz" podStartSLOduration=7.511518097 podStartE2EDuration="9.850290661s" podCreationTimestamp="2026-01-23 08:33:30 +0000 UTC" firstStartedPulling="2026-01-23 08:33:36.663391619 +0000 UTC m=+802.236347997" lastFinishedPulling="2026-01-23 08:33:39.002164193 +0000 UTC m=+804.575120561" observedRunningTime="2026-01-23 08:33:39.576563914 +0000 UTC m=+805.149520282" watchObservedRunningTime="2026-01-23 08:33:39.850290661 +0000 UTC m=+805.423247049" Jan 23 08:33:39 crc kubenswrapper[4711]: I0123 08:33:39.909025 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0e77f40d-e837-46f6-9a56-df9a7c911bfb-bundle\") pod \"0e77f40d-e837-46f6-9a56-df9a7c911bfb\" (UID: \"0e77f40d-e837-46f6-9a56-df9a7c911bfb\") " Jan 23 08:33:39 crc kubenswrapper[4711]: I0123 08:33:39.909111 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0e77f40d-e837-46f6-9a56-df9a7c911bfb-util\") pod \"0e77f40d-e837-46f6-9a56-df9a7c911bfb\" (UID: \"0e77f40d-e837-46f6-9a56-df9a7c911bfb\") " Jan 23 08:33:39 crc kubenswrapper[4711]: I0123 08:33:39.909182 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khhgd\" (UniqueName: \"kubernetes.io/projected/0e77f40d-e837-46f6-9a56-df9a7c911bfb-kube-api-access-khhgd\") pod \"0e77f40d-e837-46f6-9a56-df9a7c911bfb\" (UID: \"0e77f40d-e837-46f6-9a56-df9a7c911bfb\") " Jan 23 08:33:39 crc kubenswrapper[4711]: I0123 08:33:39.911602 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e77f40d-e837-46f6-9a56-df9a7c911bfb-bundle" (OuterVolumeSpecName: "bundle") pod "0e77f40d-e837-46f6-9a56-df9a7c911bfb" (UID: "0e77f40d-e837-46f6-9a56-df9a7c911bfb"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:33:39 crc kubenswrapper[4711]: I0123 08:33:39.920709 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e77f40d-e837-46f6-9a56-df9a7c911bfb-util" (OuterVolumeSpecName: "util") pod "0e77f40d-e837-46f6-9a56-df9a7c911bfb" (UID: "0e77f40d-e837-46f6-9a56-df9a7c911bfb"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:33:39 crc kubenswrapper[4711]: I0123 08:33:39.921941 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e77f40d-e837-46f6-9a56-df9a7c911bfb-kube-api-access-khhgd" (OuterVolumeSpecName: "kube-api-access-khhgd") pod "0e77f40d-e837-46f6-9a56-df9a7c911bfb" (UID: "0e77f40d-e837-46f6-9a56-df9a7c911bfb"). InnerVolumeSpecName "kube-api-access-khhgd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:33:40 crc kubenswrapper[4711]: I0123 08:33:40.011079 4711 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0e77f40d-e837-46f6-9a56-df9a7c911bfb-bundle\") on node \"crc\" DevicePath \"\"" Jan 23 08:33:40 crc kubenswrapper[4711]: I0123 08:33:40.011121 4711 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0e77f40d-e837-46f6-9a56-df9a7c911bfb-util\") on node \"crc\" DevicePath \"\"" Jan 23 08:33:40 crc kubenswrapper[4711]: I0123 08:33:40.011134 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khhgd\" (UniqueName: \"kubernetes.io/projected/0e77f40d-e837-46f6-9a56-df9a7c911bfb-kube-api-access-khhgd\") on node \"crc\" DevicePath \"\"" Jan 23 08:33:40 crc kubenswrapper[4711]: I0123 08:33:40.568317 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj" event={"ID":"0e77f40d-e837-46f6-9a56-df9a7c911bfb","Type":"ContainerDied","Data":"925e23579df123cc3674e1c2068628ffdcec51fb67593b94e9983d8e96cd14dc"} Jan 23 08:33:40 crc kubenswrapper[4711]: I0123 08:33:40.568342 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj" Jan 23 08:33:40 crc kubenswrapper[4711]: I0123 08:33:40.568362 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="925e23579df123cc3674e1c2068628ffdcec51fb67593b94e9983d8e96cd14dc" Jan 23 08:33:41 crc kubenswrapper[4711]: I0123 08:33:41.318850 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7rwmz" Jan 23 08:33:41 crc kubenswrapper[4711]: I0123 08:33:41.318917 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7rwmz" Jan 23 08:33:42 crc kubenswrapper[4711]: I0123 08:33:42.358660 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-7rwmz" podUID="8ef510b2-50a4-4def-a7ba-0195d03ce10a" containerName="registry-server" probeResult="failure" output=< Jan 23 08:33:42 crc kubenswrapper[4711]: timeout: failed to connect service ":50051" within 1s Jan 23 08:33:42 crc kubenswrapper[4711]: > Jan 23 08:33:51 crc kubenswrapper[4711]: I0123 08:33:51.359126 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7rwmz" Jan 23 08:33:51 crc kubenswrapper[4711]: I0123 08:33:51.407563 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7rwmz" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.641650 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-565698bc67-8zpqd"] Jan 23 08:33:53 crc kubenswrapper[4711]: E0123 08:33:53.642710 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e77f40d-e837-46f6-9a56-df9a7c911bfb" containerName="util" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.642732 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e77f40d-e837-46f6-9a56-df9a7c911bfb" containerName="util" Jan 23 08:33:53 crc kubenswrapper[4711]: E0123 08:33:53.642765 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e77f40d-e837-46f6-9a56-df9a7c911bfb" 
containerName="pull" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.642777 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e77f40d-e837-46f6-9a56-df9a7c911bfb" containerName="pull" Jan 23 08:33:53 crc kubenswrapper[4711]: E0123 08:33:53.642788 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e77f40d-e837-46f6-9a56-df9a7c911bfb" containerName="extract" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.642798 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e77f40d-e837-46f6-9a56-df9a7c911bfb" containerName="extract" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.642951 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e77f40d-e837-46f6-9a56-df9a7c911bfb" containerName="extract" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.643743 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-565698bc67-8zpqd" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.648535 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.649300 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.649626 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-hxmg2" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.649805 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.651558 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.668610 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-565698bc67-8zpqd"] Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.783879 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snjnv\" (UniqueName: \"kubernetes.io/projected/11234ef0-6e4d-4b5c-9eeb-d1e37185edb1-kube-api-access-snjnv\") pod \"metallb-operator-controller-manager-565698bc67-8zpqd\" (UID: \"11234ef0-6e4d-4b5c-9eeb-d1e37185edb1\") " pod="metallb-system/metallb-operator-controller-manager-565698bc67-8zpqd" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.783956 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/11234ef0-6e4d-4b5c-9eeb-d1e37185edb1-webhook-cert\") pod \"metallb-operator-controller-manager-565698bc67-8zpqd\" (UID: \"11234ef0-6e4d-4b5c-9eeb-d1e37185edb1\") " pod="metallb-system/metallb-operator-controller-manager-565698bc67-8zpqd" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.783992 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/11234ef0-6e4d-4b5c-9eeb-d1e37185edb1-apiservice-cert\") pod \"metallb-operator-controller-manager-565698bc67-8zpqd\" (UID: \"11234ef0-6e4d-4b5c-9eeb-d1e37185edb1\") " pod="metallb-system/metallb-operator-controller-manager-565698bc67-8zpqd" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.885636 4711 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/11234ef0-6e4d-4b5c-9eeb-d1e37185edb1-webhook-cert\") pod \"metallb-operator-controller-manager-565698bc67-8zpqd\" (UID: \"11234ef0-6e4d-4b5c-9eeb-d1e37185edb1\") " pod="metallb-system/metallb-operator-controller-manager-565698bc67-8zpqd" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.885694 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snjnv\" (UniqueName: \"kubernetes.io/projected/11234ef0-6e4d-4b5c-9eeb-d1e37185edb1-kube-api-access-snjnv\") pod \"metallb-operator-controller-manager-565698bc67-8zpqd\" (UID: \"11234ef0-6e4d-4b5c-9eeb-d1e37185edb1\") " pod="metallb-system/metallb-operator-controller-manager-565698bc67-8zpqd" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.885715 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/11234ef0-6e4d-4b5c-9eeb-d1e37185edb1-apiservice-cert\") pod \"metallb-operator-controller-manager-565698bc67-8zpqd\" (UID: \"11234ef0-6e4d-4b5c-9eeb-d1e37185edb1\") " pod="metallb-system/metallb-operator-controller-manager-565698bc67-8zpqd" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.893628 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/11234ef0-6e4d-4b5c-9eeb-d1e37185edb1-apiservice-cert\") pod \"metallb-operator-controller-manager-565698bc67-8zpqd\" (UID: \"11234ef0-6e4d-4b5c-9eeb-d1e37185edb1\") " pod="metallb-system/metallb-operator-controller-manager-565698bc67-8zpqd" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.904243 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snjnv\" (UniqueName: \"kubernetes.io/projected/11234ef0-6e4d-4b5c-9eeb-d1e37185edb1-kube-api-access-snjnv\") pod \"metallb-operator-controller-manager-565698bc67-8zpqd\" (UID: \"11234ef0-6e4d-4b5c-9eeb-d1e37185edb1\") " pod="metallb-system/metallb-operator-controller-manager-565698bc67-8zpqd" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.908445 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/11234ef0-6e4d-4b5c-9eeb-d1e37185edb1-webhook-cert\") pod \"metallb-operator-controller-manager-565698bc67-8zpqd\" (UID: \"11234ef0-6e4d-4b5c-9eeb-d1e37185edb1\") " pod="metallb-system/metallb-operator-controller-manager-565698bc67-8zpqd" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.967365 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-565698bc67-8zpqd" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.990416 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6548fb4d7d-g45jd"] Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.991275 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6548fb4d7d-g45jd" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.995944 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-vnpg8" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.996182 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 23 08:33:53 crc kubenswrapper[4711]: I0123 08:33:53.996265 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Jan 23 08:33:54 crc kubenswrapper[4711]: I0123 08:33:54.013210 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6548fb4d7d-g45jd"] Jan 23 08:33:54 crc kubenswrapper[4711]: I0123 08:33:54.092399 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tft22\" (UniqueName: \"kubernetes.io/projected/413bb353-7a3e-4b0f-a146-414e9aa93903-kube-api-access-tft22\") pod \"metallb-operator-webhook-server-6548fb4d7d-g45jd\" (UID: \"413bb353-7a3e-4b0f-a146-414e9aa93903\") " pod="metallb-system/metallb-operator-webhook-server-6548fb4d7d-g45jd" Jan 23 08:33:54 crc kubenswrapper[4711]: I0123 08:33:54.092463 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/413bb353-7a3e-4b0f-a146-414e9aa93903-webhook-cert\") pod \"metallb-operator-webhook-server-6548fb4d7d-g45jd\" (UID: \"413bb353-7a3e-4b0f-a146-414e9aa93903\") " pod="metallb-system/metallb-operator-webhook-server-6548fb4d7d-g45jd" Jan 23 08:33:54 crc kubenswrapper[4711]: I0123 08:33:54.092487 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/413bb353-7a3e-4b0f-a146-414e9aa93903-apiservice-cert\") pod \"metallb-operator-webhook-server-6548fb4d7d-g45jd\" (UID: \"413bb353-7a3e-4b0f-a146-414e9aa93903\") " pod="metallb-system/metallb-operator-webhook-server-6548fb4d7d-g45jd" Jan 23 08:33:54 crc kubenswrapper[4711]: I0123 08:33:54.194176 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tft22\" (UniqueName: \"kubernetes.io/projected/413bb353-7a3e-4b0f-a146-414e9aa93903-kube-api-access-tft22\") pod \"metallb-operator-webhook-server-6548fb4d7d-g45jd\" (UID: \"413bb353-7a3e-4b0f-a146-414e9aa93903\") " pod="metallb-system/metallb-operator-webhook-server-6548fb4d7d-g45jd" Jan 23 08:33:54 crc kubenswrapper[4711]: I0123 08:33:54.194298 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/413bb353-7a3e-4b0f-a146-414e9aa93903-webhook-cert\") pod \"metallb-operator-webhook-server-6548fb4d7d-g45jd\" (UID: \"413bb353-7a3e-4b0f-a146-414e9aa93903\") " pod="metallb-system/metallb-operator-webhook-server-6548fb4d7d-g45jd" Jan 23 08:33:54 crc kubenswrapper[4711]: I0123 08:33:54.194396 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/413bb353-7a3e-4b0f-a146-414e9aa93903-apiservice-cert\") pod \"metallb-operator-webhook-server-6548fb4d7d-g45jd\" (UID: \"413bb353-7a3e-4b0f-a146-414e9aa93903\") " pod="metallb-system/metallb-operator-webhook-server-6548fb4d7d-g45jd" Jan 23 08:33:54 crc kubenswrapper[4711]: I0123 
08:33:54.201069 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/413bb353-7a3e-4b0f-a146-414e9aa93903-webhook-cert\") pod \"metallb-operator-webhook-server-6548fb4d7d-g45jd\" (UID: \"413bb353-7a3e-4b0f-a146-414e9aa93903\") " pod="metallb-system/metallb-operator-webhook-server-6548fb4d7d-g45jd" Jan 23 08:33:54 crc kubenswrapper[4711]: I0123 08:33:54.202782 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/413bb353-7a3e-4b0f-a146-414e9aa93903-apiservice-cert\") pod \"metallb-operator-webhook-server-6548fb4d7d-g45jd\" (UID: \"413bb353-7a3e-4b0f-a146-414e9aa93903\") " pod="metallb-system/metallb-operator-webhook-server-6548fb4d7d-g45jd" Jan 23 08:33:54 crc kubenswrapper[4711]: I0123 08:33:54.244367 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tft22\" (UniqueName: \"kubernetes.io/projected/413bb353-7a3e-4b0f-a146-414e9aa93903-kube-api-access-tft22\") pod \"metallb-operator-webhook-server-6548fb4d7d-g45jd\" (UID: \"413bb353-7a3e-4b0f-a146-414e9aa93903\") " pod="metallb-system/metallb-operator-webhook-server-6548fb4d7d-g45jd" Jan 23 08:33:54 crc kubenswrapper[4711]: I0123 08:33:54.320236 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-565698bc67-8zpqd"] Jan 23 08:33:54 crc kubenswrapper[4711]: I0123 08:33:54.344782 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6548fb4d7d-g45jd" Jan 23 08:33:54 crc kubenswrapper[4711]: I0123 08:33:54.376044 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7rwmz"] Jan 23 08:33:54 crc kubenswrapper[4711]: I0123 08:33:54.376286 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7rwmz" podUID="8ef510b2-50a4-4def-a7ba-0195d03ce10a" containerName="registry-server" containerID="cri-o://91452c78b753604a11be9c17ae20960c9142e596ade40c9af132c3797970c8f6" gracePeriod=2 Jan 23 08:33:54 crc kubenswrapper[4711]: I0123 08:33:54.655932 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-565698bc67-8zpqd" event={"ID":"11234ef0-6e4d-4b5c-9eeb-d1e37185edb1","Type":"ContainerStarted","Data":"e60eeea22f29a920b940cfa3a51b5f3557f1ad7fa8350582a7d619b3e5f0982a"} Jan 23 08:33:54 crc kubenswrapper[4711]: W0123 08:33:54.807305 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod413bb353_7a3e_4b0f_a146_414e9aa93903.slice/crio-c22779e08498944b6c6fba0ee7159ac6714524123145090d9c4390a57d815fea WatchSource:0}: Error finding container c22779e08498944b6c6fba0ee7159ac6714524123145090d9c4390a57d815fea: Status 404 returned error can't find the container with id c22779e08498944b6c6fba0ee7159ac6714524123145090d9c4390a57d815fea Jan 23 08:33:54 crc kubenswrapper[4711]: I0123 08:33:54.808099 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6548fb4d7d-g45jd"] Jan 23 08:33:55 crc kubenswrapper[4711]: I0123 08:33:55.665360 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6548fb4d7d-g45jd" 
event={"ID":"413bb353-7a3e-4b0f-a146-414e9aa93903","Type":"ContainerStarted","Data":"c22779e08498944b6c6fba0ee7159ac6714524123145090d9c4390a57d815fea"} Jan 23 08:33:55 crc kubenswrapper[4711]: I0123 08:33:55.995218 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:33:55 crc kubenswrapper[4711]: I0123 08:33:55.995289 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.588774 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7rwmz" Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.630151 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pctqr\" (UniqueName: \"kubernetes.io/projected/8ef510b2-50a4-4def-a7ba-0195d03ce10a-kube-api-access-pctqr\") pod \"8ef510b2-50a4-4def-a7ba-0195d03ce10a\" (UID: \"8ef510b2-50a4-4def-a7ba-0195d03ce10a\") " Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.630235 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ef510b2-50a4-4def-a7ba-0195d03ce10a-utilities\") pod \"8ef510b2-50a4-4def-a7ba-0195d03ce10a\" (UID: \"8ef510b2-50a4-4def-a7ba-0195d03ce10a\") " Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.630362 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ef510b2-50a4-4def-a7ba-0195d03ce10a-catalog-content\") pod \"8ef510b2-50a4-4def-a7ba-0195d03ce10a\" (UID: \"8ef510b2-50a4-4def-a7ba-0195d03ce10a\") " Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.635487 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ef510b2-50a4-4def-a7ba-0195d03ce10a-utilities" (OuterVolumeSpecName: "utilities") pod "8ef510b2-50a4-4def-a7ba-0195d03ce10a" (UID: "8ef510b2-50a4-4def-a7ba-0195d03ce10a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.640652 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ef510b2-50a4-4def-a7ba-0195d03ce10a-kube-api-access-pctqr" (OuterVolumeSpecName: "kube-api-access-pctqr") pod "8ef510b2-50a4-4def-a7ba-0195d03ce10a" (UID: "8ef510b2-50a4-4def-a7ba-0195d03ce10a"). InnerVolumeSpecName "kube-api-access-pctqr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.673700 4711 generic.go:334] "Generic (PLEG): container finished" podID="8ef510b2-50a4-4def-a7ba-0195d03ce10a" containerID="91452c78b753604a11be9c17ae20960c9142e596ade40c9af132c3797970c8f6" exitCode=0 Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.673751 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7rwmz" event={"ID":"8ef510b2-50a4-4def-a7ba-0195d03ce10a","Type":"ContainerDied","Data":"91452c78b753604a11be9c17ae20960c9142e596ade40c9af132c3797970c8f6"} Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.673785 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7rwmz" event={"ID":"8ef510b2-50a4-4def-a7ba-0195d03ce10a","Type":"ContainerDied","Data":"3494852def5c5b9c5e93c7ef5f6d062da4970f3b822731285f8481b0dbaac70b"} Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.673805 4711 scope.go:117] "RemoveContainer" containerID="91452c78b753604a11be9c17ae20960c9142e596ade40c9af132c3797970c8f6" Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.673922 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7rwmz" Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.707251 4711 scope.go:117] "RemoveContainer" containerID="3b1727131c133052fdef6721cb6a3e71eb03cb6bfec2f7139784fde752b8b258" Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.732111 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pctqr\" (UniqueName: \"kubernetes.io/projected/8ef510b2-50a4-4def-a7ba-0195d03ce10a-kube-api-access-pctqr\") on node \"crc\" DevicePath \"\"" Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.732151 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ef510b2-50a4-4def-a7ba-0195d03ce10a-utilities\") on node \"crc\" DevicePath \"\"" Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.735985 4711 scope.go:117] "RemoveContainer" containerID="2c337100d931921a140e8566a428b880593979db969f75b804c318ae338fec40" Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.754553 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ef510b2-50a4-4def-a7ba-0195d03ce10a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8ef510b2-50a4-4def-a7ba-0195d03ce10a" (UID: "8ef510b2-50a4-4def-a7ba-0195d03ce10a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.766135 4711 scope.go:117] "RemoveContainer" containerID="91452c78b753604a11be9c17ae20960c9142e596ade40c9af132c3797970c8f6" Jan 23 08:33:56 crc kubenswrapper[4711]: E0123 08:33:56.766826 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91452c78b753604a11be9c17ae20960c9142e596ade40c9af132c3797970c8f6\": container with ID starting with 91452c78b753604a11be9c17ae20960c9142e596ade40c9af132c3797970c8f6 not found: ID does not exist" containerID="91452c78b753604a11be9c17ae20960c9142e596ade40c9af132c3797970c8f6" Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.766868 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91452c78b753604a11be9c17ae20960c9142e596ade40c9af132c3797970c8f6"} err="failed to get container status \"91452c78b753604a11be9c17ae20960c9142e596ade40c9af132c3797970c8f6\": rpc error: code = NotFound desc = could not find container \"91452c78b753604a11be9c17ae20960c9142e596ade40c9af132c3797970c8f6\": container with ID starting with 91452c78b753604a11be9c17ae20960c9142e596ade40c9af132c3797970c8f6 not found: ID does not exist" Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.766914 4711 scope.go:117] "RemoveContainer" containerID="3b1727131c133052fdef6721cb6a3e71eb03cb6bfec2f7139784fde752b8b258" Jan 23 08:33:56 crc kubenswrapper[4711]: E0123 08:33:56.767493 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b1727131c133052fdef6721cb6a3e71eb03cb6bfec2f7139784fde752b8b258\": container with ID starting with 3b1727131c133052fdef6721cb6a3e71eb03cb6bfec2f7139784fde752b8b258 not found: ID does not exist" containerID="3b1727131c133052fdef6721cb6a3e71eb03cb6bfec2f7139784fde752b8b258" Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.767549 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b1727131c133052fdef6721cb6a3e71eb03cb6bfec2f7139784fde752b8b258"} err="failed to get container status \"3b1727131c133052fdef6721cb6a3e71eb03cb6bfec2f7139784fde752b8b258\": rpc error: code = NotFound desc = could not find container \"3b1727131c133052fdef6721cb6a3e71eb03cb6bfec2f7139784fde752b8b258\": container with ID starting with 3b1727131c133052fdef6721cb6a3e71eb03cb6bfec2f7139784fde752b8b258 not found: ID does not exist" Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.767589 4711 scope.go:117] "RemoveContainer" containerID="2c337100d931921a140e8566a428b880593979db969f75b804c318ae338fec40" Jan 23 08:33:56 crc kubenswrapper[4711]: E0123 08:33:56.768043 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c337100d931921a140e8566a428b880593979db969f75b804c318ae338fec40\": container with ID starting with 2c337100d931921a140e8566a428b880593979db969f75b804c318ae338fec40 not found: ID does not exist" containerID="2c337100d931921a140e8566a428b880593979db969f75b804c318ae338fec40" Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.768065 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c337100d931921a140e8566a428b880593979db969f75b804c318ae338fec40"} err="failed to get container status \"2c337100d931921a140e8566a428b880593979db969f75b804c318ae338fec40\": rpc error: code = NotFound desc = could not 
find container \"2c337100d931921a140e8566a428b880593979db969f75b804c318ae338fec40\": container with ID starting with 2c337100d931921a140e8566a428b880593979db969f75b804c318ae338fec40 not found: ID does not exist" Jan 23 08:33:56 crc kubenswrapper[4711]: I0123 08:33:56.834436 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ef510b2-50a4-4def-a7ba-0195d03ce10a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 23 08:33:57 crc kubenswrapper[4711]: I0123 08:33:57.005890 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7rwmz"] Jan 23 08:33:57 crc kubenswrapper[4711]: I0123 08:33:57.012919 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7rwmz"] Jan 23 08:33:57 crc kubenswrapper[4711]: I0123 08:33:57.487906 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ef510b2-50a4-4def-a7ba-0195d03ce10a" path="/var/lib/kubelet/pods/8ef510b2-50a4-4def-a7ba-0195d03ce10a/volumes" Jan 23 08:34:00 crc kubenswrapper[4711]: I0123 08:34:00.710710 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-565698bc67-8zpqd" event={"ID":"11234ef0-6e4d-4b5c-9eeb-d1e37185edb1","Type":"ContainerStarted","Data":"28101ed0894dbd84150ef936bdc1a470c0a3ee6a34bcf7030a98dc18b9b727f3"} Jan 23 08:34:00 crc kubenswrapper[4711]: I0123 08:34:00.711272 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-565698bc67-8zpqd" Jan 23 08:34:00 crc kubenswrapper[4711]: I0123 08:34:00.712638 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6548fb4d7d-g45jd" event={"ID":"413bb353-7a3e-4b0f-a146-414e9aa93903","Type":"ContainerStarted","Data":"def02fcb46d488d443d9d5930cf9fa5acedccf41d39499bbf16ae793247ed00d"} Jan 23 08:34:00 crc kubenswrapper[4711]: I0123 08:34:00.713206 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6548fb4d7d-g45jd" Jan 23 08:34:00 crc kubenswrapper[4711]: I0123 08:34:00.737112 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-565698bc67-8zpqd" podStartSLOduration=1.841958449 podStartE2EDuration="7.737095463s" podCreationTimestamp="2026-01-23 08:33:53 +0000 UTC" firstStartedPulling="2026-01-23 08:33:54.32977027 +0000 UTC m=+819.902726638" lastFinishedPulling="2026-01-23 08:34:00.224907284 +0000 UTC m=+825.797863652" observedRunningTime="2026-01-23 08:34:00.734706805 +0000 UTC m=+826.307663183" watchObservedRunningTime="2026-01-23 08:34:00.737095463 +0000 UTC m=+826.310051831" Jan 23 08:34:14 crc kubenswrapper[4711]: I0123 08:34:14.350799 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6548fb4d7d-g45jd" Jan 23 08:34:14 crc kubenswrapper[4711]: I0123 08:34:14.378570 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6548fb4d7d-g45jd" podStartSLOduration=15.948740355 podStartE2EDuration="21.378552301s" podCreationTimestamp="2026-01-23 08:33:53 +0000 UTC" firstStartedPulling="2026-01-23 08:33:54.809825468 +0000 UTC m=+820.382781826" lastFinishedPulling="2026-01-23 08:34:00.239637404 +0000 UTC m=+825.812593772" observedRunningTime="2026-01-23 
08:34:00.77128093 +0000 UTC m=+826.344237298" watchObservedRunningTime="2026-01-23 08:34:14.378552301 +0000 UTC m=+839.951508669" Jan 23 08:34:25 crc kubenswrapper[4711]: I0123 08:34:25.994182 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:34:25 crc kubenswrapper[4711]: I0123 08:34:25.994619 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:34:25 crc kubenswrapper[4711]: I0123 08:34:25.994663 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:34:25 crc kubenswrapper[4711]: I0123 08:34:25.995175 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"09dfb01b93dac17d4e6980fd3e7ea0054118ce3392de559f35289e1fef65f8f9"} pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 23 08:34:25 crc kubenswrapper[4711]: I0123 08:34:25.995228 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" containerID="cri-o://09dfb01b93dac17d4e6980fd3e7ea0054118ce3392de559f35289e1fef65f8f9" gracePeriod=600 Jan 23 08:34:26 crc kubenswrapper[4711]: I0123 08:34:26.870029 4711 generic.go:334] "Generic (PLEG): container finished" podID="3846d4e0-cfda-4e0b-8747-85267de12736" containerID="09dfb01b93dac17d4e6980fd3e7ea0054118ce3392de559f35289e1fef65f8f9" exitCode=0 Jan 23 08:34:26 crc kubenswrapper[4711]: I0123 08:34:26.870121 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerDied","Data":"09dfb01b93dac17d4e6980fd3e7ea0054118ce3392de559f35289e1fef65f8f9"} Jan 23 08:34:26 crc kubenswrapper[4711]: I0123 08:34:26.870380 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerStarted","Data":"ba3caaa2b687a97a0322bba7ecb4eece08eed4af73c49fa085ce275a1fc9329c"} Jan 23 08:34:26 crc kubenswrapper[4711]: I0123 08:34:26.870402 4711 scope.go:117] "RemoveContainer" containerID="dcb49a247b6f118496bffafa6bebed1af2fd5b9f478c43e4444025730a1bbd84" Jan 23 08:34:33 crc kubenswrapper[4711]: I0123 08:34:33.970634 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-565698bc67-8zpqd" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.857459 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-th8jp"] Jan 23 08:34:34 crc kubenswrapper[4711]: E0123 08:34:34.857746 4711 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="8ef510b2-50a4-4def-a7ba-0195d03ce10a" containerName="extract-utilities" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.857761 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ef510b2-50a4-4def-a7ba-0195d03ce10a" containerName="extract-utilities" Jan 23 08:34:34 crc kubenswrapper[4711]: E0123 08:34:34.857775 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ef510b2-50a4-4def-a7ba-0195d03ce10a" containerName="registry-server" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.857783 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ef510b2-50a4-4def-a7ba-0195d03ce10a" containerName="registry-server" Jan 23 08:34:34 crc kubenswrapper[4711]: E0123 08:34:34.857799 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ef510b2-50a4-4def-a7ba-0195d03ce10a" containerName="extract-content" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.857808 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ef510b2-50a4-4def-a7ba-0195d03ce10a" containerName="extract-content" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.857972 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ef510b2-50a4-4def-a7ba-0195d03ce10a" containerName="registry-server" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.858424 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-th8jp" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.862084 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.862247 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-tghtl" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.868069 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-26kvs"] Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.870873 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.872378 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.872879 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.882680 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-th8jp"] Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.949197 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/81ea7880-2a6c-4e0a-8489-063feb2f99b7-frr-startup\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.949730 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/81ea7880-2a6c-4e0a-8489-063feb2f99b7-metrics\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.949801 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cb216e80-98b3-46dc-b45e-86407aa0fc7b-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-th8jp\" (UID: \"cb216e80-98b3-46dc-b45e-86407aa0fc7b\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-th8jp" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.949830 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c92b6\" (UniqueName: \"kubernetes.io/projected/cb216e80-98b3-46dc-b45e-86407aa0fc7b-kube-api-access-c92b6\") pod \"frr-k8s-webhook-server-7df86c4f6c-th8jp\" (UID: \"cb216e80-98b3-46dc-b45e-86407aa0fc7b\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-th8jp" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.949869 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/81ea7880-2a6c-4e0a-8489-063feb2f99b7-metrics-certs\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.949884 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/81ea7880-2a6c-4e0a-8489-063feb2f99b7-frr-sockets\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.949911 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tmvl\" (UniqueName: \"kubernetes.io/projected/81ea7880-2a6c-4e0a-8489-063feb2f99b7-kube-api-access-5tmvl\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.949967 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: 
\"kubernetes.io/empty-dir/81ea7880-2a6c-4e0a-8489-063feb2f99b7-reloader\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.950000 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/81ea7880-2a6c-4e0a-8489-063feb2f99b7-frr-conf\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.956027 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-kkqsn"] Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.958391 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-kkqsn" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.961998 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.962287 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-8l7fd" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.962424 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.962572 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.984640 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6968d8fdc4-jz2p2"] Jan 23 08:34:34 crc kubenswrapper[4711]: I0123 08:34:34.985767 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6968d8fdc4-jz2p2" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.008615 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-jz2p2"] Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.009958 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.050917 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/81ea7880-2a6c-4e0a-8489-063feb2f99b7-reloader\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.051286 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skl4x\" (UniqueName: \"kubernetes.io/projected/65ea66aa-64dd-4c97-9d69-4984dac21b0f-kube-api-access-skl4x\") pod \"controller-6968d8fdc4-jz2p2\" (UID: \"65ea66aa-64dd-4c97-9d69-4984dac21b0f\") " pod="metallb-system/controller-6968d8fdc4-jz2p2" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.051315 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/65ea66aa-64dd-4c97-9d69-4984dac21b0f-cert\") pod \"controller-6968d8fdc4-jz2p2\" (UID: \"65ea66aa-64dd-4c97-9d69-4984dac21b0f\") " pod="metallb-system/controller-6968d8fdc4-jz2p2" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.051350 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbdfv\" (UniqueName: \"kubernetes.io/projected/f1cfbcc9-7d53-4ec2-806c-c6070123c2fe-kube-api-access-lbdfv\") pod \"speaker-kkqsn\" (UID: \"f1cfbcc9-7d53-4ec2-806c-c6070123c2fe\") " pod="metallb-system/speaker-kkqsn" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.051385 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/65ea66aa-64dd-4c97-9d69-4984dac21b0f-metrics-certs\") pod \"controller-6968d8fdc4-jz2p2\" (UID: \"65ea66aa-64dd-4c97-9d69-4984dac21b0f\") " pod="metallb-system/controller-6968d8fdc4-jz2p2" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.051468 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/81ea7880-2a6c-4e0a-8489-063feb2f99b7-reloader\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.051522 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/81ea7880-2a6c-4e0a-8489-063feb2f99b7-frr-conf\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.051657 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f1cfbcc9-7d53-4ec2-806c-c6070123c2fe-metallb-excludel2\") pod \"speaker-kkqsn\" (UID: \"f1cfbcc9-7d53-4ec2-806c-c6070123c2fe\") " pod="metallb-system/speaker-kkqsn" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.051714 4711 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/81ea7880-2a6c-4e0a-8489-063feb2f99b7-frr-startup\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.051755 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f1cfbcc9-7d53-4ec2-806c-c6070123c2fe-memberlist\") pod \"speaker-kkqsn\" (UID: \"f1cfbcc9-7d53-4ec2-806c-c6070123c2fe\") " pod="metallb-system/speaker-kkqsn" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.051791 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cb216e80-98b3-46dc-b45e-86407aa0fc7b-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-th8jp\" (UID: \"cb216e80-98b3-46dc-b45e-86407aa0fc7b\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-th8jp" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.051816 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/81ea7880-2a6c-4e0a-8489-063feb2f99b7-metrics\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.051863 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c92b6\" (UniqueName: \"kubernetes.io/projected/cb216e80-98b3-46dc-b45e-86407aa0fc7b-kube-api-access-c92b6\") pod \"frr-k8s-webhook-server-7df86c4f6c-th8jp\" (UID: \"cb216e80-98b3-46dc-b45e-86407aa0fc7b\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-th8jp" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.051899 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/81ea7880-2a6c-4e0a-8489-063feb2f99b7-metrics-certs\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.051929 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/81ea7880-2a6c-4e0a-8489-063feb2f99b7-frr-sockets\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.051970 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/81ea7880-2a6c-4e0a-8489-063feb2f99b7-frr-conf\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.051980 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tmvl\" (UniqueName: \"kubernetes.io/projected/81ea7880-2a6c-4e0a-8489-063feb2f99b7-kube-api-access-5tmvl\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.052048 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f1cfbcc9-7d53-4ec2-806c-c6070123c2fe-metrics-certs\") pod 
\"speaker-kkqsn\" (UID: \"f1cfbcc9-7d53-4ec2-806c-c6070123c2fe\") " pod="metallb-system/speaker-kkqsn" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.052211 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/81ea7880-2a6c-4e0a-8489-063feb2f99b7-metrics\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:35 crc kubenswrapper[4711]: E0123 08:34:35.052770 4711 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.052801 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/81ea7880-2a6c-4e0a-8489-063feb2f99b7-frr-sockets\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:35 crc kubenswrapper[4711]: E0123 08:34:35.052858 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/81ea7880-2a6c-4e0a-8489-063feb2f99b7-metrics-certs podName:81ea7880-2a6c-4e0a-8489-063feb2f99b7 nodeName:}" failed. No retries permitted until 2026-01-23 08:34:35.552834138 +0000 UTC m=+861.125790566 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/81ea7880-2a6c-4e0a-8489-063feb2f99b7-metrics-certs") pod "frr-k8s-26kvs" (UID: "81ea7880-2a6c-4e0a-8489-063feb2f99b7") : secret "frr-k8s-certs-secret" not found Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.053068 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/81ea7880-2a6c-4e0a-8489-063feb2f99b7-frr-startup\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.063232 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cb216e80-98b3-46dc-b45e-86407aa0fc7b-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-th8jp\" (UID: \"cb216e80-98b3-46dc-b45e-86407aa0fc7b\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-th8jp" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.068824 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tmvl\" (UniqueName: \"kubernetes.io/projected/81ea7880-2a6c-4e0a-8489-063feb2f99b7-kube-api-access-5tmvl\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.070045 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c92b6\" (UniqueName: \"kubernetes.io/projected/cb216e80-98b3-46dc-b45e-86407aa0fc7b-kube-api-access-c92b6\") pod \"frr-k8s-webhook-server-7df86c4f6c-th8jp\" (UID: \"cb216e80-98b3-46dc-b45e-86407aa0fc7b\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-th8jp" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.153318 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f1cfbcc9-7d53-4ec2-806c-c6070123c2fe-metrics-certs\") pod \"speaker-kkqsn\" (UID: \"f1cfbcc9-7d53-4ec2-806c-c6070123c2fe\") " pod="metallb-system/speaker-kkqsn" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 
08:34:35.153381 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skl4x\" (UniqueName: \"kubernetes.io/projected/65ea66aa-64dd-4c97-9d69-4984dac21b0f-kube-api-access-skl4x\") pod \"controller-6968d8fdc4-jz2p2\" (UID: \"65ea66aa-64dd-4c97-9d69-4984dac21b0f\") " pod="metallb-system/controller-6968d8fdc4-jz2p2" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.153397 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/65ea66aa-64dd-4c97-9d69-4984dac21b0f-cert\") pod \"controller-6968d8fdc4-jz2p2\" (UID: \"65ea66aa-64dd-4c97-9d69-4984dac21b0f\") " pod="metallb-system/controller-6968d8fdc4-jz2p2" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.153419 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbdfv\" (UniqueName: \"kubernetes.io/projected/f1cfbcc9-7d53-4ec2-806c-c6070123c2fe-kube-api-access-lbdfv\") pod \"speaker-kkqsn\" (UID: \"f1cfbcc9-7d53-4ec2-806c-c6070123c2fe\") " pod="metallb-system/speaker-kkqsn" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.153440 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/65ea66aa-64dd-4c97-9d69-4984dac21b0f-metrics-certs\") pod \"controller-6968d8fdc4-jz2p2\" (UID: \"65ea66aa-64dd-4c97-9d69-4984dac21b0f\") " pod="metallb-system/controller-6968d8fdc4-jz2p2" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.153462 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f1cfbcc9-7d53-4ec2-806c-c6070123c2fe-metallb-excludel2\") pod \"speaker-kkqsn\" (UID: \"f1cfbcc9-7d53-4ec2-806c-c6070123c2fe\") " pod="metallb-system/speaker-kkqsn" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.153492 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f1cfbcc9-7d53-4ec2-806c-c6070123c2fe-memberlist\") pod \"speaker-kkqsn\" (UID: \"f1cfbcc9-7d53-4ec2-806c-c6070123c2fe\") " pod="metallb-system/speaker-kkqsn" Jan 23 08:34:35 crc kubenswrapper[4711]: E0123 08:34:35.153653 4711 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 23 08:34:35 crc kubenswrapper[4711]: E0123 08:34:35.153697 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f1cfbcc9-7d53-4ec2-806c-c6070123c2fe-memberlist podName:f1cfbcc9-7d53-4ec2-806c-c6070123c2fe nodeName:}" failed. No retries permitted until 2026-01-23 08:34:35.653683605 +0000 UTC m=+861.226639973 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/f1cfbcc9-7d53-4ec2-806c-c6070123c2fe-memberlist") pod "speaker-kkqsn" (UID: "f1cfbcc9-7d53-4ec2-806c-c6070123c2fe") : secret "metallb-memberlist" not found Jan 23 08:34:35 crc kubenswrapper[4711]: E0123 08:34:35.154459 4711 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Jan 23 08:34:35 crc kubenswrapper[4711]: E0123 08:34:35.154495 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/65ea66aa-64dd-4c97-9d69-4984dac21b0f-metrics-certs podName:65ea66aa-64dd-4c97-9d69-4984dac21b0f nodeName:}" failed. 
No retries permitted until 2026-01-23 08:34:35.654486405 +0000 UTC m=+861.227442773 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/65ea66aa-64dd-4c97-9d69-4984dac21b0f-metrics-certs") pod "controller-6968d8fdc4-jz2p2" (UID: "65ea66aa-64dd-4c97-9d69-4984dac21b0f") : secret "controller-certs-secret" not found Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.155165 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f1cfbcc9-7d53-4ec2-806c-c6070123c2fe-metallb-excludel2\") pod \"speaker-kkqsn\" (UID: \"f1cfbcc9-7d53-4ec2-806c-c6070123c2fe\") " pod="metallb-system/speaker-kkqsn" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.156530 4711 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.157646 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f1cfbcc9-7d53-4ec2-806c-c6070123c2fe-metrics-certs\") pod \"speaker-kkqsn\" (UID: \"f1cfbcc9-7d53-4ec2-806c-c6070123c2fe\") " pod="metallb-system/speaker-kkqsn" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.174452 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skl4x\" (UniqueName: \"kubernetes.io/projected/65ea66aa-64dd-4c97-9d69-4984dac21b0f-kube-api-access-skl4x\") pod \"controller-6968d8fdc4-jz2p2\" (UID: \"65ea66aa-64dd-4c97-9d69-4984dac21b0f\") " pod="metallb-system/controller-6968d8fdc4-jz2p2" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.175050 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/65ea66aa-64dd-4c97-9d69-4984dac21b0f-cert\") pod \"controller-6968d8fdc4-jz2p2\" (UID: \"65ea66aa-64dd-4c97-9d69-4984dac21b0f\") " pod="metallb-system/controller-6968d8fdc4-jz2p2" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.175467 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbdfv\" (UniqueName: \"kubernetes.io/projected/f1cfbcc9-7d53-4ec2-806c-c6070123c2fe-kube-api-access-lbdfv\") pod \"speaker-kkqsn\" (UID: \"f1cfbcc9-7d53-4ec2-806c-c6070123c2fe\") " pod="metallb-system/speaker-kkqsn" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.226084 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-th8jp" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.563037 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/81ea7880-2a6c-4e0a-8489-063feb2f99b7-metrics-certs\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.568198 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/81ea7880-2a6c-4e0a-8489-063feb2f99b7-metrics-certs\") pod \"frr-k8s-26kvs\" (UID: \"81ea7880-2a6c-4e0a-8489-063feb2f99b7\") " pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.626174 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-th8jp"] Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.664729 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/65ea66aa-64dd-4c97-9d69-4984dac21b0f-metrics-certs\") pod \"controller-6968d8fdc4-jz2p2\" (UID: \"65ea66aa-64dd-4c97-9d69-4984dac21b0f\") " pod="metallb-system/controller-6968d8fdc4-jz2p2" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.664806 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f1cfbcc9-7d53-4ec2-806c-c6070123c2fe-memberlist\") pod \"speaker-kkqsn\" (UID: \"f1cfbcc9-7d53-4ec2-806c-c6070123c2fe\") " pod="metallb-system/speaker-kkqsn" Jan 23 08:34:35 crc kubenswrapper[4711]: E0123 08:34:35.664988 4711 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 23 08:34:35 crc kubenswrapper[4711]: E0123 08:34:35.665041 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f1cfbcc9-7d53-4ec2-806c-c6070123c2fe-memberlist podName:f1cfbcc9-7d53-4ec2-806c-c6070123c2fe nodeName:}" failed. No retries permitted until 2026-01-23 08:34:36.665025262 +0000 UTC m=+862.237981640 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/f1cfbcc9-7d53-4ec2-806c-c6070123c2fe-memberlist") pod "speaker-kkqsn" (UID: "f1cfbcc9-7d53-4ec2-806c-c6070123c2fe") : secret "metallb-memberlist" not found Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.668880 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/65ea66aa-64dd-4c97-9d69-4984dac21b0f-metrics-certs\") pod \"controller-6968d8fdc4-jz2p2\" (UID: \"65ea66aa-64dd-4c97-9d69-4984dac21b0f\") " pod="metallb-system/controller-6968d8fdc4-jz2p2" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.837866 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.921061 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-th8jp" event={"ID":"cb216e80-98b3-46dc-b45e-86407aa0fc7b","Type":"ContainerStarted","Data":"3c89d7140af605bde32bad48bb6b3f0ebf7ae3aae6ee73eedf2739833d0e09bf"} Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.924434 4711 util.go:30] "No sandbox for pod can be found. 
Jan 23 08:34:35 crc kubenswrapper[4711]: I0123 08:34:35.924434 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-jz2p2"
Jan 23 08:34:36 crc kubenswrapper[4711]: I0123 08:34:36.182395 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-jz2p2"]
Jan 23 08:34:36 crc kubenswrapper[4711]: I0123 08:34:36.694597 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f1cfbcc9-7d53-4ec2-806c-c6070123c2fe-memberlist\") pod \"speaker-kkqsn\" (UID: \"f1cfbcc9-7d53-4ec2-806c-c6070123c2fe\") " pod="metallb-system/speaker-kkqsn"
Jan 23 08:34:36 crc kubenswrapper[4711]: I0123 08:34:36.700585 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f1cfbcc9-7d53-4ec2-806c-c6070123c2fe-memberlist\") pod \"speaker-kkqsn\" (UID: \"f1cfbcc9-7d53-4ec2-806c-c6070123c2fe\") " pod="metallb-system/speaker-kkqsn"
Jan 23 08:34:36 crc kubenswrapper[4711]: I0123 08:34:36.775664 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-kkqsn"
Jan 23 08:34:36 crc kubenswrapper[4711]: W0123 08:34:36.796362 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1cfbcc9_7d53_4ec2_806c_c6070123c2fe.slice/crio-ef5e3fbf7f3a811fe1cd1b21b59f7c28c9be1d0bb68b4155eebb78328434ae5d WatchSource:0}: Error finding container ef5e3fbf7f3a811fe1cd1b21b59f7c28c9be1d0bb68b4155eebb78328434ae5d: Status 404 returned error can't find the container with id ef5e3fbf7f3a811fe1cd1b21b59f7c28c9be1d0bb68b4155eebb78328434ae5d
Jan 23 08:34:36 crc kubenswrapper[4711]: I0123 08:34:36.930814 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-26kvs" event={"ID":"81ea7880-2a6c-4e0a-8489-063feb2f99b7","Type":"ContainerStarted","Data":"e4495c9fb233b4192987cb5b6bd927a73866d32416af1e74311accb30cdd69b4"}
Jan 23 08:34:36 crc kubenswrapper[4711]: I0123 08:34:36.935261 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-kkqsn" event={"ID":"f1cfbcc9-7d53-4ec2-806c-c6070123c2fe","Type":"ContainerStarted","Data":"ef5e3fbf7f3a811fe1cd1b21b59f7c28c9be1d0bb68b4155eebb78328434ae5d"}
Jan 23 08:34:36 crc kubenswrapper[4711]: I0123 08:34:36.939284 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-jz2p2" event={"ID":"65ea66aa-64dd-4c97-9d69-4984dac21b0f","Type":"ContainerStarted","Data":"aaae30faecf81e4f15d62c94177cbb58ae645f045b5856c744bfce8e2acd11bf"}
Jan 23 08:34:36 crc kubenswrapper[4711]: I0123 08:34:36.939342 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-jz2p2" event={"ID":"65ea66aa-64dd-4c97-9d69-4984dac21b0f","Type":"ContainerStarted","Data":"de0d103ec6f95ca4f36fa5a4566be13a3bdc6011e15d233cee3d68d7cb6bb273"}
Jan 23 08:34:36 crc kubenswrapper[4711]: I0123 08:34:36.939360 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-jz2p2" event={"ID":"65ea66aa-64dd-4c97-9d69-4984dac21b0f","Type":"ContainerStarted","Data":"0e8fa947a3ae84fce961126bf389c94ae6591e80b7dcb346def3dea53dfca493"}
Jan 23 08:34:36 crc kubenswrapper[4711]: I0123 08:34:36.940402 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6968d8fdc4-jz2p2"
Jan 23 08:34:36 crc kubenswrapper[4711]: I0123 08:34:36.960021 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6968d8fdc4-jz2p2" podStartSLOduration=2.96000239 podStartE2EDuration="2.96000239s" podCreationTimestamp="2026-01-23 08:34:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:34:36.956210509 +0000 UTC m=+862.529166877" watchObservedRunningTime="2026-01-23 08:34:36.96000239 +0000 UTC m=+862.532958758"
Jan 23 08:34:37 crc kubenswrapper[4711]: I0123 08:34:37.965833 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-kkqsn" event={"ID":"f1cfbcc9-7d53-4ec2-806c-c6070123c2fe","Type":"ContainerStarted","Data":"48ec37e94478f1fb25e0d6e8fb3826cbfc759ed6845196a9350c99d2e9338fca"}
Jan 23 08:34:37 crc kubenswrapper[4711]: I0123 08:34:37.966165 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-kkqsn" event={"ID":"f1cfbcc9-7d53-4ec2-806c-c6070123c2fe","Type":"ContainerStarted","Data":"02a558aa578005e643c114bfc05541ce003b81b393183bb6441071f3fbf615a2"}
Jan 23 08:34:37 crc kubenswrapper[4711]: I0123 08:34:37.966188 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-kkqsn"
Jan 23 08:34:42 crc kubenswrapper[4711]: I0123 08:34:42.997848 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-th8jp" event={"ID":"cb216e80-98b3-46dc-b45e-86407aa0fc7b","Type":"ContainerStarted","Data":"fd9a1244be3a425b7a02e04f1ede74fa18c88bf78008ed003e56ce72c01fd83d"}
Jan 23 08:34:43 crc kubenswrapper[4711]: I0123 08:34:42.998599 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-th8jp"
Jan 23 08:34:43 crc kubenswrapper[4711]: I0123 08:34:43.000428 4711 generic.go:334] "Generic (PLEG): container finished" podID="81ea7880-2a6c-4e0a-8489-063feb2f99b7" containerID="fc17bf90260297f736a495ab9e277b7fb9a36d90ec5629a92484dc87a51742ad" exitCode=0
Jan 23 08:34:43 crc kubenswrapper[4711]: I0123 08:34:43.000497 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-26kvs" event={"ID":"81ea7880-2a6c-4e0a-8489-063feb2f99b7","Type":"ContainerDied","Data":"fc17bf90260297f736a495ab9e277b7fb9a36d90ec5629a92484dc87a51742ad"}
Jan 23 08:34:43 crc kubenswrapper[4711]: I0123 08:34:43.025761 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-th8jp" podStartSLOduration=2.264364693 podStartE2EDuration="9.025739081s" podCreationTimestamp="2026-01-23 08:34:34 +0000 UTC" firstStartedPulling="2026-01-23 08:34:35.637878962 +0000 UTC m=+861.210835330" lastFinishedPulling="2026-01-23 08:34:42.39925335 +0000 UTC m=+867.972209718" observedRunningTime="2026-01-23 08:34:43.02078242 +0000 UTC m=+868.593738798" watchObservedRunningTime="2026-01-23 08:34:43.025739081 +0000 UTC m=+868.598695459"
Jan 23 08:34:43 crc kubenswrapper[4711]: I0123 08:34:43.025914 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-kkqsn" podStartSLOduration=9.025903865 podStartE2EDuration="9.025903865s" podCreationTimestamp="2026-01-23 08:34:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:34:37.986880655 +0000 UTC m=+863.559837023" watchObservedRunningTime="2026-01-23 08:34:43.025903865 +0000 UTC m=+868.598860273"
generic.go:334] "Generic (PLEG): container finished" podID="81ea7880-2a6c-4e0a-8489-063feb2f99b7" containerID="f3c26ededb96867fab4f259d26e23b4bcd9a54c893430d571a78fb3fe66cd687" exitCode=0 Jan 23 08:34:44 crc kubenswrapper[4711]: I0123 08:34:44.008958 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-26kvs" event={"ID":"81ea7880-2a6c-4e0a-8489-063feb2f99b7","Type":"ContainerDied","Data":"f3c26ededb96867fab4f259d26e23b4bcd9a54c893430d571a78fb3fe66cd687"} Jan 23 08:34:45 crc kubenswrapper[4711]: I0123 08:34:45.015982 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-26kvs" event={"ID":"81ea7880-2a6c-4e0a-8489-063feb2f99b7","Type":"ContainerStarted","Data":"0a8459dfa43bc2f29a9f7bff7958e2b01bf3647d2849975b7c0898880e784c2e"} Jan 23 08:34:46 crc kubenswrapper[4711]: I0123 08:34:46.024133 4711 generic.go:334] "Generic (PLEG): container finished" podID="81ea7880-2a6c-4e0a-8489-063feb2f99b7" containerID="0a8459dfa43bc2f29a9f7bff7958e2b01bf3647d2849975b7c0898880e784c2e" exitCode=0 Jan 23 08:34:46 crc kubenswrapper[4711]: I0123 08:34:46.024237 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-26kvs" event={"ID":"81ea7880-2a6c-4e0a-8489-063feb2f99b7","Type":"ContainerDied","Data":"0a8459dfa43bc2f29a9f7bff7958e2b01bf3647d2849975b7c0898880e784c2e"} Jan 23 08:34:47 crc kubenswrapper[4711]: I0123 08:34:47.042720 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-26kvs" event={"ID":"81ea7880-2a6c-4e0a-8489-063feb2f99b7","Type":"ContainerStarted","Data":"0db33ec0e214209d91765c04cbf501d77ef49f4f486c5ba6b0d3369e3502da6d"} Jan 23 08:34:47 crc kubenswrapper[4711]: I0123 08:34:47.042762 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-26kvs" event={"ID":"81ea7880-2a6c-4e0a-8489-063feb2f99b7","Type":"ContainerStarted","Data":"383c5bfd537494b6b3b5563dbbe8bc1fe52517a5d35bdd6721e64a2233bb6fb4"} Jan 23 08:34:47 crc kubenswrapper[4711]: I0123 08:34:47.042773 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-26kvs" event={"ID":"81ea7880-2a6c-4e0a-8489-063feb2f99b7","Type":"ContainerStarted","Data":"628e27eac5955cc43597d8259a54dc8b98955d1f20edc68ce0b2d2e461c13b6b"} Jan 23 08:34:47 crc kubenswrapper[4711]: I0123 08:34:47.042782 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-26kvs" event={"ID":"81ea7880-2a6c-4e0a-8489-063feb2f99b7","Type":"ContainerStarted","Data":"f6f4bcf2ed41ce20a99f1d2edd990c6c4187e678721e6a6729be9f8f46e0e9e3"} Jan 23 08:34:48 crc kubenswrapper[4711]: I0123 08:34:48.059917 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-26kvs" event={"ID":"81ea7880-2a6c-4e0a-8489-063feb2f99b7","Type":"ContainerStarted","Data":"5b32653337e4e6234126681db8e26d1e232f10e174f5c9613b01c03cb13dfadf"} Jan 23 08:34:48 crc kubenswrapper[4711]: I0123 08:34:48.059976 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-26kvs" event={"ID":"81ea7880-2a6c-4e0a-8489-063feb2f99b7","Type":"ContainerStarted","Data":"38bee2db5a1da449922060858e36e025cde464d3847f37c5780e53bc53d16e6f"} Jan 23 08:34:48 crc kubenswrapper[4711]: I0123 08:34:48.060407 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:48 crc kubenswrapper[4711]: I0123 08:34:48.101440 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-26kvs" podStartSLOduration=7.664852541 
podStartE2EDuration="14.101412788s" podCreationTimestamp="2026-01-23 08:34:34 +0000 UTC" firstStartedPulling="2026-01-23 08:34:35.973424134 +0000 UTC m=+861.546380502" lastFinishedPulling="2026-01-23 08:34:42.409984381 +0000 UTC m=+867.982940749" observedRunningTime="2026-01-23 08:34:48.094869679 +0000 UTC m=+873.667826047" watchObservedRunningTime="2026-01-23 08:34:48.101412788 +0000 UTC m=+873.674369196" Jan 23 08:34:50 crc kubenswrapper[4711]: I0123 08:34:50.347537 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ns642"] Jan 23 08:34:50 crc kubenswrapper[4711]: I0123 08:34:50.349842 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ns642" Jan 23 08:34:50 crc kubenswrapper[4711]: I0123 08:34:50.366569 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ns642"] Jan 23 08:34:50 crc kubenswrapper[4711]: I0123 08:34:50.499448 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwx98\" (UniqueName: \"kubernetes.io/projected/fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c-kube-api-access-cwx98\") pod \"certified-operators-ns642\" (UID: \"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c\") " pod="openshift-marketplace/certified-operators-ns642" Jan 23 08:34:50 crc kubenswrapper[4711]: I0123 08:34:50.499545 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c-utilities\") pod \"certified-operators-ns642\" (UID: \"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c\") " pod="openshift-marketplace/certified-operators-ns642" Jan 23 08:34:50 crc kubenswrapper[4711]: I0123 08:34:50.499574 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c-catalog-content\") pod \"certified-operators-ns642\" (UID: \"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c\") " pod="openshift-marketplace/certified-operators-ns642" Jan 23 08:34:50 crc kubenswrapper[4711]: I0123 08:34:50.601303 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwx98\" (UniqueName: \"kubernetes.io/projected/fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c-kube-api-access-cwx98\") pod \"certified-operators-ns642\" (UID: \"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c\") " pod="openshift-marketplace/certified-operators-ns642" Jan 23 08:34:50 crc kubenswrapper[4711]: I0123 08:34:50.601472 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c-utilities\") pod \"certified-operators-ns642\" (UID: \"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c\") " pod="openshift-marketplace/certified-operators-ns642" Jan 23 08:34:50 crc kubenswrapper[4711]: I0123 08:34:50.601509 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c-catalog-content\") pod \"certified-operators-ns642\" (UID: \"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c\") " pod="openshift-marketplace/certified-operators-ns642" Jan 23 08:34:50 crc kubenswrapper[4711]: I0123 08:34:50.602162 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c-catalog-content\") pod \"certified-operators-ns642\" (UID: \"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c\") " pod="openshift-marketplace/certified-operators-ns642" Jan 23 08:34:50 crc kubenswrapper[4711]: I0123 08:34:50.602633 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c-utilities\") pod \"certified-operators-ns642\" (UID: \"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c\") " pod="openshift-marketplace/certified-operators-ns642" Jan 23 08:34:50 crc kubenswrapper[4711]: I0123 08:34:50.622626 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwx98\" (UniqueName: \"kubernetes.io/projected/fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c-kube-api-access-cwx98\") pod \"certified-operators-ns642\" (UID: \"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c\") " pod="openshift-marketplace/certified-operators-ns642" Jan 23 08:34:50 crc kubenswrapper[4711]: I0123 08:34:50.673383 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ns642" Jan 23 08:34:50 crc kubenswrapper[4711]: I0123 08:34:50.840808 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:50 crc kubenswrapper[4711]: I0123 08:34:50.918863 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-26kvs" Jan 23 08:34:51 crc kubenswrapper[4711]: I0123 08:34:51.148005 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ns642"] Jan 23 08:34:52 crc kubenswrapper[4711]: I0123 08:34:52.094172 4711 generic.go:334] "Generic (PLEG): container finished" podID="fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c" containerID="3632f11a117de2ec38975330927596dc0950ee01bc20b13a13dbab31b7635246" exitCode=0 Jan 23 08:34:52 crc kubenswrapper[4711]: I0123 08:34:52.094250 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ns642" event={"ID":"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c","Type":"ContainerDied","Data":"3632f11a117de2ec38975330927596dc0950ee01bc20b13a13dbab31b7635246"} Jan 23 08:34:52 crc kubenswrapper[4711]: I0123 08:34:52.094557 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ns642" event={"ID":"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c","Type":"ContainerStarted","Data":"ae7661dcbd66f296fa0fabe9f2e36e3fa8b5e9d975b0e92b8dcb9531127a9bb6"} Jan 23 08:34:53 crc kubenswrapper[4711]: I0123 08:34:53.102670 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ns642" event={"ID":"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c","Type":"ContainerStarted","Data":"3f509053ce3e1c15c06108cf04d388daa9eb8b89d89639241887dd45e9895a85"} Jan 23 08:34:54 crc kubenswrapper[4711]: I0123 08:34:54.109660 4711 generic.go:334] "Generic (PLEG): container finished" podID="fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c" containerID="3f509053ce3e1c15c06108cf04d388daa9eb8b89d89639241887dd45e9895a85" exitCode=0 Jan 23 08:34:54 crc kubenswrapper[4711]: I0123 08:34:54.109710 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ns642" event={"ID":"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c","Type":"ContainerDied","Data":"3f509053ce3e1c15c06108cf04d388daa9eb8b89d89639241887dd45e9895a85"} Jan 23 08:34:55 crc kubenswrapper[4711]: 
Jan 23 08:34:55 crc kubenswrapper[4711]: I0123 08:34:55.232781 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-th8jp"
Jan 23 08:34:55 crc kubenswrapper[4711]: I0123 08:34:55.929033 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6968d8fdc4-jz2p2"
Jan 23 08:34:56 crc kubenswrapper[4711]: I0123 08:34:56.128314 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ns642" event={"ID":"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c","Type":"ContainerStarted","Data":"999152484acaa425e209e0759505a9bc047525d49400fbf69f82cd78e8fc08b3"}
Jan 23 08:34:56 crc kubenswrapper[4711]: I0123 08:34:56.152177 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ns642" podStartSLOduration=2.844799316 podStartE2EDuration="6.15215992s" podCreationTimestamp="2026-01-23 08:34:50 +0000 UTC" firstStartedPulling="2026-01-23 08:34:52.096181282 +0000 UTC m=+877.669137650" lastFinishedPulling="2026-01-23 08:34:55.403541886 +0000 UTC m=+880.976498254" observedRunningTime="2026-01-23 08:34:56.149323021 +0000 UTC m=+881.722279409" watchObservedRunningTime="2026-01-23 08:34:56.15215992 +0000 UTC m=+881.725116288"
Jan 23 08:34:56 crc kubenswrapper[4711]: I0123 08:34:56.779609 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-kkqsn"
Jan 23 08:34:58 crc kubenswrapper[4711]: I0123 08:34:58.167124 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr"]
Jan 23 08:34:58 crc kubenswrapper[4711]: I0123 08:34:58.168614 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr"
Jan 23 08:34:58 crc kubenswrapper[4711]: I0123 08:34:58.171385 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Jan 23 08:34:58 crc kubenswrapper[4711]: I0123 08:34:58.178585 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr"]
Jan 23 08:34:58 crc kubenswrapper[4711]: I0123 08:34:58.304265 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfd6t\" (UniqueName: \"kubernetes.io/projected/dc8bec03-5a55-4232-a3de-6650e4c7a7da-kube-api-access-xfd6t\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr\" (UID: \"dc8bec03-5a55-4232-a3de-6650e4c7a7da\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr"
Jan 23 08:34:58 crc kubenswrapper[4711]: I0123 08:34:58.304387 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dc8bec03-5a55-4232-a3de-6650e4c7a7da-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr\" (UID: \"dc8bec03-5a55-4232-a3de-6650e4c7a7da\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr"
Jan 23 08:34:58 crc kubenswrapper[4711]: I0123 08:34:58.304446 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dc8bec03-5a55-4232-a3de-6650e4c7a7da-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr\" (UID: \"dc8bec03-5a55-4232-a3de-6650e4c7a7da\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr"
Jan 23 08:34:58 crc kubenswrapper[4711]: I0123 08:34:58.405598 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dc8bec03-5a55-4232-a3de-6650e4c7a7da-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr\" (UID: \"dc8bec03-5a55-4232-a3de-6650e4c7a7da\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr"
Jan 23 08:34:58 crc kubenswrapper[4711]: I0123 08:34:58.405667 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfd6t\" (UniqueName: \"kubernetes.io/projected/dc8bec03-5a55-4232-a3de-6650e4c7a7da-kube-api-access-xfd6t\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr\" (UID: \"dc8bec03-5a55-4232-a3de-6650e4c7a7da\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr"
Jan 23 08:34:58 crc kubenswrapper[4711]: I0123 08:34:58.405712 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dc8bec03-5a55-4232-a3de-6650e4c7a7da-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr\" (UID: \"dc8bec03-5a55-4232-a3de-6650e4c7a7da\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr"
Jan 23 08:34:58 crc kubenswrapper[4711]: I0123 08:34:58.406041 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dc8bec03-5a55-4232-a3de-6650e4c7a7da-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr\" (UID: \"dc8bec03-5a55-4232-a3de-6650e4c7a7da\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr"
Jan 23 08:34:58 crc kubenswrapper[4711]: I0123 08:34:58.406114 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dc8bec03-5a55-4232-a3de-6650e4c7a7da-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr\" (UID: \"dc8bec03-5a55-4232-a3de-6650e4c7a7da\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr"
Jan 23 08:34:58 crc kubenswrapper[4711]: I0123 08:34:58.424344 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfd6t\" (UniqueName: \"kubernetes.io/projected/dc8bec03-5a55-4232-a3de-6650e4c7a7da-kube-api-access-xfd6t\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr\" (UID: \"dc8bec03-5a55-4232-a3de-6650e4c7a7da\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr"
Jan 23 08:34:58 crc kubenswrapper[4711]: I0123 08:34:58.486703 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr"
Jan 23 08:34:58 crc kubenswrapper[4711]: I0123 08:34:58.704453 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr"]
Jan 23 08:34:58 crc kubenswrapper[4711]: W0123 08:34:58.718557 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc8bec03_5a55_4232_a3de_6650e4c7a7da.slice/crio-f59d53b4a95be3ae3de50d8235c1cb4580e6c37a20542582b15f38f20d4de3b6 WatchSource:0}: Error finding container f59d53b4a95be3ae3de50d8235c1cb4580e6c37a20542582b15f38f20d4de3b6: Status 404 returned error can't find the container with id f59d53b4a95be3ae3de50d8235c1cb4580e6c37a20542582b15f38f20d4de3b6
Jan 23 08:34:59 crc kubenswrapper[4711]: I0123 08:34:59.145682 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr" event={"ID":"dc8bec03-5a55-4232-a3de-6650e4c7a7da","Type":"ContainerStarted","Data":"f59d53b4a95be3ae3de50d8235c1cb4580e6c37a20542582b15f38f20d4de3b6"}
Jan 23 08:35:00 crc kubenswrapper[4711]: I0123 08:35:00.674357 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ns642"
Jan 23 08:35:00 crc kubenswrapper[4711]: I0123 08:35:00.674771 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ns642"
Jan 23 08:35:00 crc kubenswrapper[4711]: I0123 08:35:00.722564 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ns642"
Jan 23 08:35:01 crc kubenswrapper[4711]: I0123 08:35:01.193355 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ns642"
Jan 23 08:35:03 crc kubenswrapper[4711]: I0123 08:35:03.127299 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ns642"]
Jan 23 08:35:03 crc kubenswrapper[4711]: I0123 08:35:03.168676 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ns642" podUID="fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c" containerName="registry-server" containerID="cri-o://999152484acaa425e209e0759505a9bc047525d49400fbf69f82cd78e8fc08b3" gracePeriod=2
Jan 23 08:35:05 crc kubenswrapper[4711]: I0123 08:35:05.840886 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-26kvs"
Jan 23 08:35:06 crc kubenswrapper[4711]: I0123 08:35:06.190617 4711 generic.go:334] "Generic (PLEG): container finished" podID="dc8bec03-5a55-4232-a3de-6650e4c7a7da" containerID="08e7f33cd80eb9d6aa97457b7a7ade8edd18e61641856ea3801a4f72acdb8036" exitCode=0
Jan 23 08:35:06 crc kubenswrapper[4711]: I0123 08:35:06.190656 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr" event={"ID":"dc8bec03-5a55-4232-a3de-6650e4c7a7da","Type":"ContainerDied","Data":"08e7f33cd80eb9d6aa97457b7a7ade8edd18e61641856ea3801a4f72acdb8036"}
Jan 23 08:35:06 crc kubenswrapper[4711]: I0123 08:35:06.193424 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ns642_fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c/registry-server/0.log"
Jan 23 08:35:06 crc kubenswrapper[4711]: I0123 08:35:06.194091 4711 generic.go:334] "Generic (PLEG): container finished" podID="fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c" containerID="999152484acaa425e209e0759505a9bc047525d49400fbf69f82cd78e8fc08b3" exitCode=137
Jan 23 08:35:06 crc kubenswrapper[4711]: I0123 08:35:06.194127 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ns642" event={"ID":"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c","Type":"ContainerDied","Data":"999152484acaa425e209e0759505a9bc047525d49400fbf69f82cd78e8fc08b3"}
Jan 23 08:35:07 crc kubenswrapper[4711]: I0123 08:35:07.282858 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ns642_fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c/registry-server/0.log"
Jan 23 08:35:07 crc kubenswrapper[4711]: I0123 08:35:07.284182 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ns642"
Jan 23 08:35:07 crc kubenswrapper[4711]: I0123 08:35:07.328007 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c-catalog-content\") pod \"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c\" (UID: \"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c\") "
Jan 23 08:35:07 crc kubenswrapper[4711]: I0123 08:35:07.328072 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c-utilities\") pod \"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c\" (UID: \"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c\") "
Jan 23 08:35:07 crc kubenswrapper[4711]: I0123 08:35:07.328108 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cwx98\" (UniqueName: \"kubernetes.io/projected/fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c-kube-api-access-cwx98\") pod \"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c\" (UID: \"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c\") "
Jan 23 08:35:07 crc kubenswrapper[4711]: I0123 08:35:07.329910 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c-utilities" (OuterVolumeSpecName: "utilities") pod "fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c" (UID: "fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:35:07 crc kubenswrapper[4711]: I0123 08:35:07.334977 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c-kube-api-access-cwx98" (OuterVolumeSpecName: "kube-api-access-cwx98") pod "fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c" (UID: "fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c"). InnerVolumeSpecName "kube-api-access-cwx98". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:35:07 crc kubenswrapper[4711]: I0123 08:35:07.385557 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c" (UID: "fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:35:07 crc kubenswrapper[4711]: I0123 08:35:07.430370 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 23 08:35:07 crc kubenswrapper[4711]: I0123 08:35:07.430411 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c-utilities\") on node \"crc\" DevicePath \"\""
Jan 23 08:35:07 crc kubenswrapper[4711]: I0123 08:35:07.430422 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cwx98\" (UniqueName: \"kubernetes.io/projected/fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c-kube-api-access-cwx98\") on node \"crc\" DevicePath \"\""
Jan 23 08:35:08 crc kubenswrapper[4711]: I0123 08:35:08.219682 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ns642_fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c/registry-server/0.log"
Jan 23 08:35:08 crc kubenswrapper[4711]: I0123 08:35:08.220444 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ns642" event={"ID":"fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c","Type":"ContainerDied","Data":"ae7661dcbd66f296fa0fabe9f2e36e3fa8b5e9d975b0e92b8dcb9531127a9bb6"}
Jan 23 08:35:08 crc kubenswrapper[4711]: I0123 08:35:08.220485 4711 scope.go:117] "RemoveContainer" containerID="999152484acaa425e209e0759505a9bc047525d49400fbf69f82cd78e8fc08b3"
Jan 23 08:35:08 crc kubenswrapper[4711]: I0123 08:35:08.220488 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ns642"
Jan 23 08:35:08 crc kubenswrapper[4711]: I0123 08:35:08.241274 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ns642"]
Jan 23 08:35:08 crc kubenswrapper[4711]: I0123 08:35:08.241470 4711 scope.go:117] "RemoveContainer" containerID="3f509053ce3e1c15c06108cf04d388daa9eb8b89d89639241887dd45e9895a85"
Jan 23 08:35:08 crc kubenswrapper[4711]: I0123 08:35:08.247214 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ns642"]
Jan 23 08:35:08 crc kubenswrapper[4711]: I0123 08:35:08.275404 4711 scope.go:117] "RemoveContainer" containerID="3632f11a117de2ec38975330927596dc0950ee01bc20b13a13dbab31b7635246"
Jan 23 08:35:09 crc kubenswrapper[4711]: I0123 08:35:09.482048 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c" path="/var/lib/kubelet/pods/fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c/volumes"
Jan 23 08:35:19 crc kubenswrapper[4711]: I0123 08:35:19.303165 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr" event={"ID":"dc8bec03-5a55-4232-a3de-6650e4c7a7da","Type":"ContainerStarted","Data":"23c2b7940db8f06055d10cb675d2aa111a816f72be420e56d571977c55bb1bc2"}
Jan 23 08:35:20 crc kubenswrapper[4711]: I0123 08:35:20.310917 4711 generic.go:334] "Generic (PLEG): container finished" podID="dc8bec03-5a55-4232-a3de-6650e4c7a7da" containerID="23c2b7940db8f06055d10cb675d2aa111a816f72be420e56d571977c55bb1bc2" exitCode=0
Jan 23 08:35:20 crc kubenswrapper[4711]: I0123 08:35:20.311021 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr" event={"ID":"dc8bec03-5a55-4232-a3de-6650e4c7a7da","Type":"ContainerDied","Data":"23c2b7940db8f06055d10cb675d2aa111a816f72be420e56d571977c55bb1bc2"}
Jan 23 08:35:21 crc kubenswrapper[4711]: I0123 08:35:21.319521 4711 generic.go:334] "Generic (PLEG): container finished" podID="dc8bec03-5a55-4232-a3de-6650e4c7a7da" containerID="3518d94b1b161a3bc5575c3ffeccf70a66252af5562e008a8747c250ee37c730" exitCode=0
Jan 23 08:35:21 crc kubenswrapper[4711]: I0123 08:35:21.319570 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr" event={"ID":"dc8bec03-5a55-4232-a3de-6650e4c7a7da","Type":"ContainerDied","Data":"3518d94b1b161a3bc5575c3ffeccf70a66252af5562e008a8747c250ee37c730"}
Jan 23 08:35:22 crc kubenswrapper[4711]: I0123 08:35:22.572658 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr"
Jan 23 08:35:22 crc kubenswrapper[4711]: I0123 08:35:22.646391 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xfd6t\" (UniqueName: \"kubernetes.io/projected/dc8bec03-5a55-4232-a3de-6650e4c7a7da-kube-api-access-xfd6t\") pod \"dc8bec03-5a55-4232-a3de-6650e4c7a7da\" (UID: \"dc8bec03-5a55-4232-a3de-6650e4c7a7da\") "
Jan 23 08:35:22 crc kubenswrapper[4711]: I0123 08:35:22.646644 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dc8bec03-5a55-4232-a3de-6650e4c7a7da-bundle\") pod \"dc8bec03-5a55-4232-a3de-6650e4c7a7da\" (UID: \"dc8bec03-5a55-4232-a3de-6650e4c7a7da\") "
Jan 23 08:35:22 crc kubenswrapper[4711]: I0123 08:35:22.646733 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dc8bec03-5a55-4232-a3de-6650e4c7a7da-util\") pod \"dc8bec03-5a55-4232-a3de-6650e4c7a7da\" (UID: \"dc8bec03-5a55-4232-a3de-6650e4c7a7da\") "
Jan 23 08:35:22 crc kubenswrapper[4711]: I0123 08:35:22.647947 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc8bec03-5a55-4232-a3de-6650e4c7a7da-bundle" (OuterVolumeSpecName: "bundle") pod "dc8bec03-5a55-4232-a3de-6650e4c7a7da" (UID: "dc8bec03-5a55-4232-a3de-6650e4c7a7da"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:35:22 crc kubenswrapper[4711]: I0123 08:35:22.652264 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc8bec03-5a55-4232-a3de-6650e4c7a7da-kube-api-access-xfd6t" (OuterVolumeSpecName: "kube-api-access-xfd6t") pod "dc8bec03-5a55-4232-a3de-6650e4c7a7da" (UID: "dc8bec03-5a55-4232-a3de-6650e4c7a7da"). InnerVolumeSpecName "kube-api-access-xfd6t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:35:22 crc kubenswrapper[4711]: I0123 08:35:22.659372 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc8bec03-5a55-4232-a3de-6650e4c7a7da-util" (OuterVolumeSpecName: "util") pod "dc8bec03-5a55-4232-a3de-6650e4c7a7da" (UID: "dc8bec03-5a55-4232-a3de-6650e4c7a7da"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:35:22 crc kubenswrapper[4711]: I0123 08:35:22.756808 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xfd6t\" (UniqueName: \"kubernetes.io/projected/dc8bec03-5a55-4232-a3de-6650e4c7a7da-kube-api-access-xfd6t\") on node \"crc\" DevicePath \"\""
Jan 23 08:35:22 crc kubenswrapper[4711]: I0123 08:35:22.756862 4711 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dc8bec03-5a55-4232-a3de-6650e4c7a7da-bundle\") on node \"crc\" DevicePath \"\""
Jan 23 08:35:22 crc kubenswrapper[4711]: I0123 08:35:22.756879 4711 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dc8bec03-5a55-4232-a3de-6650e4c7a7da-util\") on node \"crc\" DevicePath \"\""
Jan 23 08:35:23 crc kubenswrapper[4711]: I0123 08:35:23.334151 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr" event={"ID":"dc8bec03-5a55-4232-a3de-6650e4c7a7da","Type":"ContainerDied","Data":"f59d53b4a95be3ae3de50d8235c1cb4580e6c37a20542582b15f38f20d4de3b6"}
Jan 23 08:35:23 crc kubenswrapper[4711]: I0123 08:35:23.334193 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f59d53b4a95be3ae3de50d8235c1cb4580e6c37a20542582b15f38f20d4de3b6"
Jan 23 08:35:23 crc kubenswrapper[4711]: I0123 08:35:23.334230 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr"
Jan 23 08:35:30 crc kubenswrapper[4711]: I0123 08:35:30.817934 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-kd2sp"]
Jan 23 08:35:30 crc kubenswrapper[4711]: E0123 08:35:30.820349 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc8bec03-5a55-4232-a3de-6650e4c7a7da" containerName="util"
Jan 23 08:35:30 crc kubenswrapper[4711]: I0123 08:35:30.820503 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc8bec03-5a55-4232-a3de-6650e4c7a7da" containerName="util"
Jan 23 08:35:30 crc kubenswrapper[4711]: E0123 08:35:30.820696 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc8bec03-5a55-4232-a3de-6650e4c7a7da" containerName="pull"
Jan 23 08:35:30 crc kubenswrapper[4711]: I0123 08:35:30.820834 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc8bec03-5a55-4232-a3de-6650e4c7a7da" containerName="pull"
Jan 23 08:35:30 crc kubenswrapper[4711]: E0123 08:35:30.820959 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c" containerName="extract-utilities"
Jan 23 08:35:30 crc kubenswrapper[4711]: I0123 08:35:30.821078 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c" containerName="extract-utilities"
Jan 23 08:35:30 crc kubenswrapper[4711]: E0123 08:35:30.821196 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c" containerName="registry-server"
Jan 23 08:35:30 crc kubenswrapper[4711]: I0123 08:35:30.821314 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c" containerName="registry-server"
Jan 23 08:35:30 crc kubenswrapper[4711]: E0123 08:35:30.821432 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc8bec03-5a55-4232-a3de-6650e4c7a7da" containerName="extract"
Jan 23 08:35:30 crc kubenswrapper[4711]: I0123 08:35:30.821577 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc8bec03-5a55-4232-a3de-6650e4c7a7da" containerName="extract"
Jan 23 08:35:30 crc kubenswrapper[4711]: E0123 08:35:30.821750 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c" containerName="extract-content"
Jan 23 08:35:30 crc kubenswrapper[4711]: I0123 08:35:30.821874 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c" containerName="extract-content"
Jan 23 08:35:30 crc kubenswrapper[4711]: I0123 08:35:30.822329 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc8bec03-5a55-4232-a3de-6650e4c7a7da" containerName="extract"
Jan 23 08:35:30 crc kubenswrapper[4711]: I0123 08:35:30.822474 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa6c1af7-ffd5-4af4-a7ac-e88d67ef981c" containerName="registry-server"
Jan 23 08:35:30 crc kubenswrapper[4711]: I0123 08:35:30.823563 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-kd2sp"
Jan 23 08:35:30 crc kubenswrapper[4711]: I0123 08:35:30.825550 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-kd2sp"]
Jan 23 08:35:30 crc kubenswrapper[4711]: I0123 08:35:30.826292 4711 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-t4kwx"
Jan 23 08:35:30 crc kubenswrapper[4711]: I0123 08:35:30.832228 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt"
Jan 23 08:35:30 crc kubenswrapper[4711]: I0123 08:35:30.833373 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt"
Jan 23 08:35:30 crc kubenswrapper[4711]: I0123 08:35:30.907570 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmhzk\" (UniqueName: \"kubernetes.io/projected/38417491-d01f-499f-9d41-42b826eaa57a-kube-api-access-fmhzk\") pod \"cert-manager-operator-controller-manager-64cf6dff88-kd2sp\" (UID: \"38417491-d01f-499f-9d41-42b826eaa57a\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-kd2sp"
Jan 23 08:35:30 crc kubenswrapper[4711]: I0123 08:35:30.907649 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/38417491-d01f-499f-9d41-42b826eaa57a-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-kd2sp\" (UID: \"38417491-d01f-499f-9d41-42b826eaa57a\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-kd2sp"
Jan 23 08:35:31 crc kubenswrapper[4711]: I0123 08:35:31.009170 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/38417491-d01f-499f-9d41-42b826eaa57a-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-kd2sp\" (UID: \"38417491-d01f-499f-9d41-42b826eaa57a\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-kd2sp"
Jan 23 08:35:31 crc kubenswrapper[4711]: I0123 08:35:31.009285 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmhzk\" (UniqueName: \"kubernetes.io/projected/38417491-d01f-499f-9d41-42b826eaa57a-kube-api-access-fmhzk\") pod \"cert-manager-operator-controller-manager-64cf6dff88-kd2sp\" (UID: \"38417491-d01f-499f-9d41-42b826eaa57a\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-kd2sp"
Jan 23 08:35:31 crc kubenswrapper[4711]: I0123 08:35:31.010031 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/38417491-d01f-499f-9d41-42b826eaa57a-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-kd2sp\" (UID: \"38417491-d01f-499f-9d41-42b826eaa57a\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-kd2sp"
Jan 23 08:35:31 crc kubenswrapper[4711]: I0123 08:35:31.031624 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmhzk\" (UniqueName: \"kubernetes.io/projected/38417491-d01f-499f-9d41-42b826eaa57a-kube-api-access-fmhzk\") pod \"cert-manager-operator-controller-manager-64cf6dff88-kd2sp\" (UID: \"38417491-d01f-499f-9d41-42b826eaa57a\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-kd2sp"
Jan 23 08:35:31 crc kubenswrapper[4711]: I0123 08:35:31.142020 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-kd2sp"
Jan 23 08:35:31 crc kubenswrapper[4711]: I0123 08:35:31.535913 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-kd2sp"]
Jan 23 08:35:31 crc kubenswrapper[4711]: W0123 08:35:31.543827 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod38417491_d01f_499f_9d41_42b826eaa57a.slice/crio-e104eab02790fc19f7c8f121cfeafbb7eeb28f7a9bc4340cdf0acf1bb27dcf3c WatchSource:0}: Error finding container e104eab02790fc19f7c8f121cfeafbb7eeb28f7a9bc4340cdf0acf1bb27dcf3c: Status 404 returned error can't find the container with id e104eab02790fc19f7c8f121cfeafbb7eeb28f7a9bc4340cdf0acf1bb27dcf3c
Jan 23 08:35:32 crc kubenswrapper[4711]: I0123 08:35:32.395074 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-kd2sp" event={"ID":"38417491-d01f-499f-9d41-42b826eaa57a","Type":"ContainerStarted","Data":"e104eab02790fc19f7c8f121cfeafbb7eeb28f7a9bc4340cdf0acf1bb27dcf3c"}
Jan 23 08:35:44 crc kubenswrapper[4711]: I0123 08:35:44.916954 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xgl2g"]
Jan 23 08:35:44 crc kubenswrapper[4711]: I0123 08:35:44.921601 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xgl2g"
Jan 23 08:35:44 crc kubenswrapper[4711]: I0123 08:35:44.928309 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xgl2g"]
Jan 23 08:35:45 crc kubenswrapper[4711]: I0123 08:35:45.003997 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aedfa99e-f804-4822-b8cb-6436f4311888-utilities\") pod \"redhat-marketplace-xgl2g\" (UID: \"aedfa99e-f804-4822-b8cb-6436f4311888\") " pod="openshift-marketplace/redhat-marketplace-xgl2g"
Jan 23 08:35:45 crc kubenswrapper[4711]: I0123 08:35:45.004073 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqw2t\" (UniqueName: \"kubernetes.io/projected/aedfa99e-f804-4822-b8cb-6436f4311888-kube-api-access-tqw2t\") pod \"redhat-marketplace-xgl2g\" (UID: \"aedfa99e-f804-4822-b8cb-6436f4311888\") " pod="openshift-marketplace/redhat-marketplace-xgl2g"
Jan 23 08:35:45 crc kubenswrapper[4711]: I0123 08:35:45.004309 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aedfa99e-f804-4822-b8cb-6436f4311888-catalog-content\") pod \"redhat-marketplace-xgl2g\" (UID: \"aedfa99e-f804-4822-b8cb-6436f4311888\") " pod="openshift-marketplace/redhat-marketplace-xgl2g"
Jan 23 08:35:45 crc kubenswrapper[4711]: I0123 08:35:45.105806 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aedfa99e-f804-4822-b8cb-6436f4311888-utilities\") pod \"redhat-marketplace-xgl2g\" (UID: \"aedfa99e-f804-4822-b8cb-6436f4311888\") " pod="openshift-marketplace/redhat-marketplace-xgl2g"
Jan 23 08:35:45 crc kubenswrapper[4711]: I0123 08:35:45.105884 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqw2t\" (UniqueName: \"kubernetes.io/projected/aedfa99e-f804-4822-b8cb-6436f4311888-kube-api-access-tqw2t\") pod \"redhat-marketplace-xgl2g\" (UID: \"aedfa99e-f804-4822-b8cb-6436f4311888\") " pod="openshift-marketplace/redhat-marketplace-xgl2g"
Jan 23 08:35:45 crc kubenswrapper[4711]: I0123 08:35:45.105922 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aedfa99e-f804-4822-b8cb-6436f4311888-catalog-content\") pod \"redhat-marketplace-xgl2g\" (UID: \"aedfa99e-f804-4822-b8cb-6436f4311888\") " pod="openshift-marketplace/redhat-marketplace-xgl2g"
Jan 23 08:35:45 crc kubenswrapper[4711]: I0123 08:35:45.106558 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aedfa99e-f804-4822-b8cb-6436f4311888-catalog-content\") pod \"redhat-marketplace-xgl2g\" (UID: \"aedfa99e-f804-4822-b8cb-6436f4311888\") " pod="openshift-marketplace/redhat-marketplace-xgl2g"
Jan 23 08:35:45 crc kubenswrapper[4711]: I0123 08:35:45.106564 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aedfa99e-f804-4822-b8cb-6436f4311888-utilities\") pod \"redhat-marketplace-xgl2g\" (UID: \"aedfa99e-f804-4822-b8cb-6436f4311888\") " pod="openshift-marketplace/redhat-marketplace-xgl2g"
Jan 23 08:35:45 crc kubenswrapper[4711]: I0123 08:35:45.129277 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqw2t\" (UniqueName: \"kubernetes.io/projected/aedfa99e-f804-4822-b8cb-6436f4311888-kube-api-access-tqw2t\") pod \"redhat-marketplace-xgl2g\" (UID: \"aedfa99e-f804-4822-b8cb-6436f4311888\") " pod="openshift-marketplace/redhat-marketplace-xgl2g"
Jan 23 08:35:45 crc kubenswrapper[4711]: I0123 08:35:45.244262 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xgl2g"
Jan 23 08:35:49 crc kubenswrapper[4711]: I0123 08:35:49.686243 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xgl2g"]
Jan 23 08:35:49 crc kubenswrapper[4711]: W0123 08:35:49.691963 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaedfa99e_f804_4822_b8cb_6436f4311888.slice/crio-a255a262476fadf569aaa725b2909cd382299fe0fe8136057193d0075767bbb6 WatchSource:0}: Error finding container a255a262476fadf569aaa725b2909cd382299fe0fe8136057193d0075767bbb6: Status 404 returned error can't find the container with id a255a262476fadf569aaa725b2909cd382299fe0fe8136057193d0075767bbb6
Jan 23 08:35:50 crc kubenswrapper[4711]: I0123 08:35:50.504984 4711 generic.go:334] "Generic (PLEG): container finished" podID="aedfa99e-f804-4822-b8cb-6436f4311888" containerID="8095228adcc6ed98f8114900710375a16ec5edd1b87152fa911fb1f95c3b4590" exitCode=0
Jan 23 08:35:50 crc kubenswrapper[4711]: I0123 08:35:50.505038 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgl2g" event={"ID":"aedfa99e-f804-4822-b8cb-6436f4311888","Type":"ContainerDied","Data":"8095228adcc6ed98f8114900710375a16ec5edd1b87152fa911fb1f95c3b4590"}
Jan 23 08:35:50 crc kubenswrapper[4711]: I0123 08:35:50.505332 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgl2g" event={"ID":"aedfa99e-f804-4822-b8cb-6436f4311888","Type":"ContainerStarted","Data":"a255a262476fadf569aaa725b2909cd382299fe0fe8136057193d0075767bbb6"}
Jan 23 08:35:50 crc kubenswrapper[4711]: I0123 08:35:50.508321 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-kd2sp" event={"ID":"38417491-d01f-499f-9d41-42b826eaa57a","Type":"ContainerStarted","Data":"c5626026b785230694a799f9d88d58892b26718b203baf320bf2873ce16b1010"}
Jan 23 08:35:50 crc kubenswrapper[4711]: I0123 08:35:50.545216 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-kd2sp" podStartSLOduration=2.451677156 podStartE2EDuration="20.545194116s" podCreationTimestamp="2026-01-23 08:35:30 +0000 UTC" firstStartedPulling="2026-01-23 08:35:31.546074293 +0000 UTC m=+917.119030661" lastFinishedPulling="2026-01-23 08:35:49.639591253 +0000 UTC m=+935.212547621" observedRunningTime="2026-01-23 08:35:50.544320234 +0000 UTC m=+936.117276592" watchObservedRunningTime="2026-01-23 08:35:50.545194116 +0000 UTC m=+936.118150484"
Jan 23 08:35:51 crc kubenswrapper[4711]: I0123 08:35:51.515914 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgl2g" event={"ID":"aedfa99e-f804-4822-b8cb-6436f4311888","Type":"ContainerStarted","Data":"1b76648dbf73e6ab5302434f4a2d70603e06477e1b05258a3749a42a30a56414"}
Jan 23 08:35:52 crc kubenswrapper[4711]: I0123 08:35:52.523471 4711 generic.go:334] "Generic (PLEG): container finished" podID="aedfa99e-f804-4822-b8cb-6436f4311888" containerID="1b76648dbf73e6ab5302434f4a2d70603e06477e1b05258a3749a42a30a56414" exitCode=0
Jan 23 08:35:52 crc kubenswrapper[4711]: I0123 08:35:52.523550 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgl2g" event={"ID":"aedfa99e-f804-4822-b8cb-6436f4311888","Type":"ContainerDied","Data":"1b76648dbf73e6ab5302434f4a2d70603e06477e1b05258a3749a42a30a56414"}
Jan 23 08:35:53 crc kubenswrapper[4711]: I0123 08:35:53.900661 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-t9x8z"]
Jan 23 08:35:53 crc kubenswrapper[4711]: I0123 08:35:53.901961 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-t9x8z"
Jan 23 08:35:53 crc kubenswrapper[4711]: I0123 08:35:53.910427 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-t9x8z"]
Jan 23 08:35:53 crc kubenswrapper[4711]: I0123 08:35:53.914565 4711 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-wwncl"
Jan 23 08:35:53 crc kubenswrapper[4711]: I0123 08:35:53.914638 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt"
Jan 23 08:35:53 crc kubenswrapper[4711]: I0123 08:35:53.914794 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt"
Jan 23 08:35:54 crc kubenswrapper[4711]: I0123 08:35:54.030129 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdt9q\" (UniqueName: \"kubernetes.io/projected/f4626f45-a992-460a-833a-db30b2e83041-kube-api-access-rdt9q\") pod \"cert-manager-webhook-f4fb5df64-t9x8z\" (UID: \"f4626f45-a992-460a-833a-db30b2e83041\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-t9x8z"
Jan 23 08:35:54 crc kubenswrapper[4711]: I0123 08:35:54.030263 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f4626f45-a992-460a-833a-db30b2e83041-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-t9x8z\" (UID: \"f4626f45-a992-460a-833a-db30b2e83041\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-t9x8z"
Jan 23 08:35:54 crc kubenswrapper[4711]: I0123 08:35:54.131875 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f4626f45-a992-460a-833a-db30b2e83041-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-t9x8z\" (UID: \"f4626f45-a992-460a-833a-db30b2e83041\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-t9x8z"
Jan 23 08:35:54 crc kubenswrapper[4711]: I0123 08:35:54.131964 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdt9q\" (UniqueName: \"kubernetes.io/projected/f4626f45-a992-460a-833a-db30b2e83041-kube-api-access-rdt9q\") pod \"cert-manager-webhook-f4fb5df64-t9x8z\" (UID: \"f4626f45-a992-460a-833a-db30b2e83041\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-t9x8z"
Jan 23 08:35:54 crc kubenswrapper[4711]: I0123 08:35:54.157613 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdt9q\" (UniqueName: \"kubernetes.io/projected/f4626f45-a992-460a-833a-db30b2e83041-kube-api-access-rdt9q\") pod \"cert-manager-webhook-f4fb5df64-t9x8z\" (UID: \"f4626f45-a992-460a-833a-db30b2e83041\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-t9x8z"
Jan 23 08:35:54 crc kubenswrapper[4711]: I0123 08:35:54.163657 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f4626f45-a992-460a-833a-db30b2e83041-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-t9x8z\" (UID: \"f4626f45-a992-460a-833a-db30b2e83041\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-t9x8z"
Jan 23 08:35:54 crc kubenswrapper[4711]: I0123 08:35:54.218647 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-t9x8z"
Jan 23 08:35:54 crc kubenswrapper[4711]: I0123 08:35:54.537523 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgl2g" event={"ID":"aedfa99e-f804-4822-b8cb-6436f4311888","Type":"ContainerStarted","Data":"644d91fef927bc53777c646ad16c05b6f4566fac6892dfd06229a449663b8161"}
Jan 23 08:35:54 crc kubenswrapper[4711]: I0123 08:35:54.557340 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xgl2g" podStartSLOduration=7.518375016 podStartE2EDuration="10.557324749s" podCreationTimestamp="2026-01-23 08:35:44 +0000 UTC" firstStartedPulling="2026-01-23 08:35:50.506466466 +0000 UTC m=+936.079422834" lastFinishedPulling="2026-01-23 08:35:53.545416209 +0000 UTC m=+939.118372567" observedRunningTime="2026-01-23 08:35:54.551802556 +0000 UTC m=+940.124758924" watchObservedRunningTime="2026-01-23 08:35:54.557324749 +0000 UTC m=+940.130281107"
Jan 23 08:35:54 crc kubenswrapper[4711]: I0123 08:35:54.636570 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-t9x8z"]
Jan 23 08:35:54 crc kubenswrapper[4711]: W0123 08:35:54.641712 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4626f45_a992_460a_833a_db30b2e83041.slice/crio-7047077a57073ef0d21b850889c3a72a4e72771dbce38bcd6330132f6f40d1b1 WatchSource:0}: Error finding container 7047077a57073ef0d21b850889c3a72a4e72771dbce38bcd6330132f6f40d1b1: Status 404 returned error can't find the container with id 7047077a57073ef0d21b850889c3a72a4e72771dbce38bcd6330132f6f40d1b1
Jan 23 08:35:54 crc kubenswrapper[4711]: I0123 08:35:54.962652 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-zsgc9"]
Jan 23 08:35:54 crc kubenswrapper[4711]: I0123 08:35:54.963748 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-zsgc9"
Jan 23 08:35:54 crc kubenswrapper[4711]: I0123 08:35:54.967554 4711 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-44dj6"
Jan 23 08:35:54 crc kubenswrapper[4711]: I0123 08:35:54.971841 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-zsgc9"]
Jan 23 08:35:55 crc kubenswrapper[4711]: I0123 08:35:55.144695 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckz5r\" (UniqueName: \"kubernetes.io/projected/de49b0b5-98ed-4f89-bf42-7d22260d8bb4-kube-api-access-ckz5r\") pod \"cert-manager-cainjector-855d9ccff4-zsgc9\" (UID: \"de49b0b5-98ed-4f89-bf42-7d22260d8bb4\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-zsgc9"
Jan 23 08:35:55 crc kubenswrapper[4711]: I0123 08:35:55.144804 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/de49b0b5-98ed-4f89-bf42-7d22260d8bb4-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-zsgc9\" (UID: \"de49b0b5-98ed-4f89-bf42-7d22260d8bb4\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-zsgc9"
Jan 23 08:35:55 crc kubenswrapper[4711]: I0123 08:35:55.244651 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xgl2g"
Jan 23 08:35:55 crc kubenswrapper[4711]: I0123 08:35:55.244723 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xgl2g"
Jan 23 08:35:55 crc kubenswrapper[4711]: I0123 08:35:55.245815 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/de49b0b5-98ed-4f89-bf42-7d22260d8bb4-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-zsgc9\" (UID: \"de49b0b5-98ed-4f89-bf42-7d22260d8bb4\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-zsgc9"
Jan 23 08:35:55 crc kubenswrapper[4711]: I0123 08:35:55.245881 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckz5r\" (UniqueName: \"kubernetes.io/projected/de49b0b5-98ed-4f89-bf42-7d22260d8bb4-kube-api-access-ckz5r\") pod \"cert-manager-cainjector-855d9ccff4-zsgc9\" (UID: \"de49b0b5-98ed-4f89-bf42-7d22260d8bb4\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-zsgc9"
Jan 23 08:35:55 crc kubenswrapper[4711]: I0123 08:35:55.275231 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/de49b0b5-98ed-4f89-bf42-7d22260d8bb4-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-zsgc9\" (UID: \"de49b0b5-98ed-4f89-bf42-7d22260d8bb4\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-zsgc9"
Jan 23 08:35:55 crc kubenswrapper[4711]: I0123 08:35:55.286401 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckz5r\" (UniqueName: \"kubernetes.io/projected/de49b0b5-98ed-4f89-bf42-7d22260d8bb4-kube-api-access-ckz5r\") pod \"cert-manager-cainjector-855d9ccff4-zsgc9\" (UID: \"de49b0b5-98ed-4f89-bf42-7d22260d8bb4\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-zsgc9"
Jan 23 08:35:55 crc kubenswrapper[4711]: I0123 08:35:55.328642 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-zsgc9" Jan 23 08:35:55 crc kubenswrapper[4711]: I0123 08:35:55.545746 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-t9x8z" event={"ID":"f4626f45-a992-460a-833a-db30b2e83041","Type":"ContainerStarted","Data":"7047077a57073ef0d21b850889c3a72a4e72771dbce38bcd6330132f6f40d1b1"} Jan 23 08:35:55 crc kubenswrapper[4711]: I0123 08:35:55.574052 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-zsgc9"] Jan 23 08:35:55 crc kubenswrapper[4711]: W0123 08:35:55.585151 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde49b0b5_98ed_4f89_bf42_7d22260d8bb4.slice/crio-9dba2a49e0878c202a8768af5c6fd1919b049957b9cd21a172434974ce9b97e0 WatchSource:0}: Error finding container 9dba2a49e0878c202a8768af5c6fd1919b049957b9cd21a172434974ce9b97e0: Status 404 returned error can't find the container with id 9dba2a49e0878c202a8768af5c6fd1919b049957b9cd21a172434974ce9b97e0 Jan 23 08:35:56 crc kubenswrapper[4711]: I0123 08:35:56.301643 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-xgl2g" podUID="aedfa99e-f804-4822-b8cb-6436f4311888" containerName="registry-server" probeResult="failure" output=< Jan 23 08:35:56 crc kubenswrapper[4711]: timeout: failed to connect service ":50051" within 1s Jan 23 08:35:56 crc kubenswrapper[4711]: > Jan 23 08:35:56 crc kubenswrapper[4711]: I0123 08:35:56.551145 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-zsgc9" event={"ID":"de49b0b5-98ed-4f89-bf42-7d22260d8bb4","Type":"ContainerStarted","Data":"9dba2a49e0878c202a8768af5c6fd1919b049957b9cd21a172434974ce9b97e0"} Jan 23 08:36:04 crc kubenswrapper[4711]: I0123 08:36:04.599247 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-zsgc9" event={"ID":"de49b0b5-98ed-4f89-bf42-7d22260d8bb4","Type":"ContainerStarted","Data":"3dbf435c8fa696ee178e04d219134424055cdffcb5aae2bafcf12101ef6b32f2"} Jan 23 08:36:04 crc kubenswrapper[4711]: I0123 08:36:04.602002 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-t9x8z" event={"ID":"f4626f45-a992-460a-833a-db30b2e83041","Type":"ContainerStarted","Data":"3044ed8359feb51b7fd65e19efd4f524ede212cfb844f17e616b0a6b70920049"} Jan 23 08:36:04 crc kubenswrapper[4711]: I0123 08:36:04.602168 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-t9x8z" Jan 23 08:36:04 crc kubenswrapper[4711]: I0123 08:36:04.620778 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-zsgc9" podStartSLOduration=2.23890945 podStartE2EDuration="10.620752504s" podCreationTimestamp="2026-01-23 08:35:54 +0000 UTC" firstStartedPulling="2026-01-23 08:35:55.587243138 +0000 UTC m=+941.160199516" lastFinishedPulling="2026-01-23 08:36:03.969086202 +0000 UTC m=+949.542042570" observedRunningTime="2026-01-23 08:36:04.613873157 +0000 UTC m=+950.186829525" watchObservedRunningTime="2026-01-23 08:36:04.620752504 +0000 UTC m=+950.193708872" Jan 23 08:36:04 crc kubenswrapper[4711]: I0123 08:36:04.636929 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-t9x8z" 
podStartSLOduration=2.326153043 podStartE2EDuration="11.636909405s" podCreationTimestamp="2026-01-23 08:35:53 +0000 UTC" firstStartedPulling="2026-01-23 08:35:54.643144112 +0000 UTC m=+940.216100480" lastFinishedPulling="2026-01-23 08:36:03.953900474 +0000 UTC m=+949.526856842" observedRunningTime="2026-01-23 08:36:04.631588777 +0000 UTC m=+950.204545145" watchObservedRunningTime="2026-01-23 08:36:04.636909405 +0000 UTC m=+950.209865773" Jan 23 08:36:05 crc kubenswrapper[4711]: I0123 08:36:05.287430 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xgl2g" Jan 23 08:36:05 crc kubenswrapper[4711]: I0123 08:36:05.337936 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xgl2g" Jan 23 08:36:05 crc kubenswrapper[4711]: I0123 08:36:05.517255 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xgl2g"] Jan 23 08:36:06 crc kubenswrapper[4711]: I0123 08:36:06.622954 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xgl2g" podUID="aedfa99e-f804-4822-b8cb-6436f4311888" containerName="registry-server" containerID="cri-o://644d91fef927bc53777c646ad16c05b6f4566fac6892dfd06229a449663b8161" gracePeriod=2 Jan 23 08:36:09 crc kubenswrapper[4711]: I0123 08:36:09.221797 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-t9x8z" Jan 23 08:36:12 crc kubenswrapper[4711]: I0123 08:36:12.765912 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-8m4r4"] Jan 23 08:36:12 crc kubenswrapper[4711]: I0123 08:36:12.767363 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-8m4r4" Jan 23 08:36:12 crc kubenswrapper[4711]: I0123 08:36:12.775978 4711 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-rr2pb" Jan 23 08:36:12 crc kubenswrapper[4711]: I0123 08:36:12.782414 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-8m4r4"] Jan 23 08:36:12 crc kubenswrapper[4711]: I0123 08:36:12.894011 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2a47eec5-3714-4461-841a-d47b62502c91-bound-sa-token\") pod \"cert-manager-86cb77c54b-8m4r4\" (UID: \"2a47eec5-3714-4461-841a-d47b62502c91\") " pod="cert-manager/cert-manager-86cb77c54b-8m4r4" Jan 23 08:36:12 crc kubenswrapper[4711]: I0123 08:36:12.894087 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhtwm\" (UniqueName: \"kubernetes.io/projected/2a47eec5-3714-4461-841a-d47b62502c91-kube-api-access-qhtwm\") pod \"cert-manager-86cb77c54b-8m4r4\" (UID: \"2a47eec5-3714-4461-841a-d47b62502c91\") " pod="cert-manager/cert-manager-86cb77c54b-8m4r4" Jan 23 08:36:12 crc kubenswrapper[4711]: I0123 08:36:12.995460 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2a47eec5-3714-4461-841a-d47b62502c91-bound-sa-token\") pod \"cert-manager-86cb77c54b-8m4r4\" (UID: \"2a47eec5-3714-4461-841a-d47b62502c91\") " pod="cert-manager/cert-manager-86cb77c54b-8m4r4" Jan 23 08:36:12 crc kubenswrapper[4711]: I0123 08:36:12.995548 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhtwm\" (UniqueName: \"kubernetes.io/projected/2a47eec5-3714-4461-841a-d47b62502c91-kube-api-access-qhtwm\") pod \"cert-manager-86cb77c54b-8m4r4\" (UID: \"2a47eec5-3714-4461-841a-d47b62502c91\") " pod="cert-manager/cert-manager-86cb77c54b-8m4r4" Jan 23 08:36:13 crc kubenswrapper[4711]: I0123 08:36:13.022197 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2a47eec5-3714-4461-841a-d47b62502c91-bound-sa-token\") pod \"cert-manager-86cb77c54b-8m4r4\" (UID: \"2a47eec5-3714-4461-841a-d47b62502c91\") " pod="cert-manager/cert-manager-86cb77c54b-8m4r4" Jan 23 08:36:13 crc kubenswrapper[4711]: I0123 08:36:13.024320 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhtwm\" (UniqueName: \"kubernetes.io/projected/2a47eec5-3714-4461-841a-d47b62502c91-kube-api-access-qhtwm\") pod \"cert-manager-86cb77c54b-8m4r4\" (UID: \"2a47eec5-3714-4461-841a-d47b62502c91\") " pod="cert-manager/cert-manager-86cb77c54b-8m4r4" Jan 23 08:36:13 crc kubenswrapper[4711]: I0123 08:36:13.112335 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-8m4r4" Jan 23 08:36:13 crc kubenswrapper[4711]: I0123 08:36:13.345358 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-8m4r4"] Jan 23 08:36:15 crc kubenswrapper[4711]: I0123 08:36:15.059718 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xgl2g_aedfa99e-f804-4822-b8cb-6436f4311888/registry-server/0.log" Jan 23 08:36:15 crc kubenswrapper[4711]: I0123 08:36:15.060884 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xgl2g" Jan 23 08:36:15 crc kubenswrapper[4711]: I0123 08:36:15.223217 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aedfa99e-f804-4822-b8cb-6436f4311888-utilities\") pod \"aedfa99e-f804-4822-b8cb-6436f4311888\" (UID: \"aedfa99e-f804-4822-b8cb-6436f4311888\") " Jan 23 08:36:15 crc kubenswrapper[4711]: I0123 08:36:15.223348 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aedfa99e-f804-4822-b8cb-6436f4311888-catalog-content\") pod \"aedfa99e-f804-4822-b8cb-6436f4311888\" (UID: \"aedfa99e-f804-4822-b8cb-6436f4311888\") " Jan 23 08:36:15 crc kubenswrapper[4711]: I0123 08:36:15.223971 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aedfa99e-f804-4822-b8cb-6436f4311888-utilities" (OuterVolumeSpecName: "utilities") pod "aedfa99e-f804-4822-b8cb-6436f4311888" (UID: "aedfa99e-f804-4822-b8cb-6436f4311888"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:36:15 crc kubenswrapper[4711]: I0123 08:36:15.225796 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqw2t\" (UniqueName: \"kubernetes.io/projected/aedfa99e-f804-4822-b8cb-6436f4311888-kube-api-access-tqw2t\") pod \"aedfa99e-f804-4822-b8cb-6436f4311888\" (UID: \"aedfa99e-f804-4822-b8cb-6436f4311888\") " Jan 23 08:36:15 crc kubenswrapper[4711]: I0123 08:36:15.226179 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aedfa99e-f804-4822-b8cb-6436f4311888-utilities\") on node \"crc\" DevicePath \"\"" Jan 23 08:36:15 crc kubenswrapper[4711]: I0123 08:36:15.230683 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aedfa99e-f804-4822-b8cb-6436f4311888-kube-api-access-tqw2t" (OuterVolumeSpecName: "kube-api-access-tqw2t") pod "aedfa99e-f804-4822-b8cb-6436f4311888" (UID: "aedfa99e-f804-4822-b8cb-6436f4311888"). InnerVolumeSpecName "kube-api-access-tqw2t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:36:15 crc kubenswrapper[4711]: I0123 08:36:15.247051 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aedfa99e-f804-4822-b8cb-6436f4311888-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aedfa99e-f804-4822-b8cb-6436f4311888" (UID: "aedfa99e-f804-4822-b8cb-6436f4311888"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:36:15 crc kubenswrapper[4711]: I0123 08:36:15.327726 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqw2t\" (UniqueName: \"kubernetes.io/projected/aedfa99e-f804-4822-b8cb-6436f4311888-kube-api-access-tqw2t\") on node \"crc\" DevicePath \"\"" Jan 23 08:36:15 crc kubenswrapper[4711]: I0123 08:36:15.327766 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aedfa99e-f804-4822-b8cb-6436f4311888-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 23 08:36:15 crc kubenswrapper[4711]: I0123 08:36:15.364109 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-xgl2g_aedfa99e-f804-4822-b8cb-6436f4311888/registry-server/0.log" Jan 23 08:36:15 crc kubenswrapper[4711]: I0123 08:36:15.365516 4711 generic.go:334] "Generic (PLEG): container finished" podID="aedfa99e-f804-4822-b8cb-6436f4311888" containerID="644d91fef927bc53777c646ad16c05b6f4566fac6892dfd06229a449663b8161" exitCode=137 Jan 23 08:36:15 crc kubenswrapper[4711]: I0123 08:36:15.365558 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgl2g" event={"ID":"aedfa99e-f804-4822-b8cb-6436f4311888","Type":"ContainerDied","Data":"644d91fef927bc53777c646ad16c05b6f4566fac6892dfd06229a449663b8161"} Jan 23 08:36:15 crc kubenswrapper[4711]: I0123 08:36:15.365591 4711 scope.go:117] "RemoveContainer" containerID="644d91fef927bc53777c646ad16c05b6f4566fac6892dfd06229a449663b8161" Jan 23 08:36:15 crc kubenswrapper[4711]: I0123 08:36:15.379358 4711 scope.go:117] "RemoveContainer" containerID="1b76648dbf73e6ab5302434f4a2d70603e06477e1b05258a3749a42a30a56414" Jan 23 08:36:15 crc kubenswrapper[4711]: I0123 08:36:15.396142 4711 scope.go:117] "RemoveContainer" containerID="8095228adcc6ed98f8114900710375a16ec5edd1b87152fa911fb1f95c3b4590" Jan 23 08:36:16 crc kubenswrapper[4711]: I0123 08:36:16.375483 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-8m4r4" event={"ID":"2a47eec5-3714-4461-841a-d47b62502c91","Type":"ContainerStarted","Data":"33ae4fdd957d4104c20eee413947e25f775c57d87d73d29f7dc64e16153dd4fd"} Jan 23 08:36:16 crc kubenswrapper[4711]: I0123 08:36:16.375971 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-8m4r4" event={"ID":"2a47eec5-3714-4461-841a-d47b62502c91","Type":"ContainerStarted","Data":"f7142f8fc5d9cbd0d6d418960ea563462d14181e7e01300d6dc494edf1848417"} Jan 23 08:36:16 crc kubenswrapper[4711]: I0123 08:36:16.377570 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgl2g" event={"ID":"aedfa99e-f804-4822-b8cb-6436f4311888","Type":"ContainerDied","Data":"a255a262476fadf569aaa725b2909cd382299fe0fe8136057193d0075767bbb6"} Jan 23 08:36:16 crc kubenswrapper[4711]: I0123 08:36:16.377621 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xgl2g" Jan 23 08:36:16 crc kubenswrapper[4711]: I0123 08:36:16.397555 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-8m4r4" podStartSLOduration=4.397532116 podStartE2EDuration="4.397532116s" podCreationTimestamp="2026-01-23 08:36:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:36:16.393027736 +0000 UTC m=+961.965984134" watchObservedRunningTime="2026-01-23 08:36:16.397532116 +0000 UTC m=+961.970488484" Jan 23 08:36:16 crc kubenswrapper[4711]: I0123 08:36:16.424098 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xgl2g"] Jan 23 08:36:16 crc kubenswrapper[4711]: I0123 08:36:16.429618 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xgl2g"] Jan 23 08:36:17 crc kubenswrapper[4711]: I0123 08:36:17.480413 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aedfa99e-f804-4822-b8cb-6436f4311888" path="/var/lib/kubelet/pods/aedfa99e-f804-4822-b8cb-6436f4311888/volumes" Jan 23 08:36:22 crc kubenswrapper[4711]: I0123 08:36:22.637282 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-4sj65"] Jan 23 08:36:22 crc kubenswrapper[4711]: E0123 08:36:22.637880 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aedfa99e-f804-4822-b8cb-6436f4311888" containerName="registry-server" Jan 23 08:36:22 crc kubenswrapper[4711]: I0123 08:36:22.637906 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="aedfa99e-f804-4822-b8cb-6436f4311888" containerName="registry-server" Jan 23 08:36:22 crc kubenswrapper[4711]: E0123 08:36:22.637924 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aedfa99e-f804-4822-b8cb-6436f4311888" containerName="extract-content" Jan 23 08:36:22 crc kubenswrapper[4711]: I0123 08:36:22.637930 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="aedfa99e-f804-4822-b8cb-6436f4311888" containerName="extract-content" Jan 23 08:36:22 crc kubenswrapper[4711]: E0123 08:36:22.637939 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aedfa99e-f804-4822-b8cb-6436f4311888" containerName="extract-utilities" Jan 23 08:36:22 crc kubenswrapper[4711]: I0123 08:36:22.637945 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="aedfa99e-f804-4822-b8cb-6436f4311888" containerName="extract-utilities" Jan 23 08:36:22 crc kubenswrapper[4711]: I0123 08:36:22.638041 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="aedfa99e-f804-4822-b8cb-6436f4311888" containerName="registry-server" Jan 23 08:36:22 crc kubenswrapper[4711]: I0123 08:36:22.638426 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-4sj65" Jan 23 08:36:22 crc kubenswrapper[4711]: I0123 08:36:22.641478 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-d4n7j" Jan 23 08:36:22 crc kubenswrapper[4711]: I0123 08:36:22.642878 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Jan 23 08:36:22 crc kubenswrapper[4711]: I0123 08:36:22.644678 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Jan 23 08:36:22 crc kubenswrapper[4711]: I0123 08:36:22.658953 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-4sj65"] Jan 23 08:36:22 crc kubenswrapper[4711]: I0123 08:36:22.819248 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckv2h\" (UniqueName: \"kubernetes.io/projected/5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081-kube-api-access-ckv2h\") pod \"openstack-operator-index-4sj65\" (UID: \"5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081\") " pod="openstack-operators/openstack-operator-index-4sj65" Jan 23 08:36:22 crc kubenswrapper[4711]: I0123 08:36:22.920323 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckv2h\" (UniqueName: \"kubernetes.io/projected/5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081-kube-api-access-ckv2h\") pod \"openstack-operator-index-4sj65\" (UID: \"5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081\") " pod="openstack-operators/openstack-operator-index-4sj65" Jan 23 08:36:22 crc kubenswrapper[4711]: I0123 08:36:22.940243 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckv2h\" (UniqueName: \"kubernetes.io/projected/5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081-kube-api-access-ckv2h\") pod \"openstack-operator-index-4sj65\" (UID: \"5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081\") " pod="openstack-operators/openstack-operator-index-4sj65" Jan 23 08:36:22 crc kubenswrapper[4711]: I0123 08:36:22.969850 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-4sj65" Jan 23 08:36:23 crc kubenswrapper[4711]: I0123 08:36:23.398203 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-4sj65"] Jan 23 08:36:23 crc kubenswrapper[4711]: I0123 08:36:23.425138 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-4sj65" event={"ID":"5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081","Type":"ContainerStarted","Data":"3ce137ed1dd487d73ff37fa03f39911bcbf33fd8cec20b0bb8f53ba1fd8de30e"} Jan 23 08:36:26 crc kubenswrapper[4711]: I0123 08:36:26.011673 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-4sj65"] Jan 23 08:36:26 crc kubenswrapper[4711]: I0123 08:36:26.617960 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-2fdmb"] Jan 23 08:36:26 crc kubenswrapper[4711]: I0123 08:36:26.622180 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-2fdmb" Jan 23 08:36:26 crc kubenswrapper[4711]: I0123 08:36:26.627418 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-2fdmb"] Jan 23 08:36:26 crc kubenswrapper[4711]: I0123 08:36:26.776695 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5fxk\" (UniqueName: \"kubernetes.io/projected/eb73ab49-65f1-446a-960e-035803896d9a-kube-api-access-w5fxk\") pod \"openstack-operator-index-2fdmb\" (UID: \"eb73ab49-65f1-446a-960e-035803896d9a\") " pod="openstack-operators/openstack-operator-index-2fdmb" Jan 23 08:36:26 crc kubenswrapper[4711]: I0123 08:36:26.877724 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5fxk\" (UniqueName: \"kubernetes.io/projected/eb73ab49-65f1-446a-960e-035803896d9a-kube-api-access-w5fxk\") pod \"openstack-operator-index-2fdmb\" (UID: \"eb73ab49-65f1-446a-960e-035803896d9a\") " pod="openstack-operators/openstack-operator-index-2fdmb" Jan 23 08:36:26 crc kubenswrapper[4711]: I0123 08:36:26.899433 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5fxk\" (UniqueName: \"kubernetes.io/projected/eb73ab49-65f1-446a-960e-035803896d9a-kube-api-access-w5fxk\") pod \"openstack-operator-index-2fdmb\" (UID: \"eb73ab49-65f1-446a-960e-035803896d9a\") " pod="openstack-operators/openstack-operator-index-2fdmb" Jan 23 08:36:26 crc kubenswrapper[4711]: I0123 08:36:26.939697 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-2fdmb" Jan 23 08:36:27 crc kubenswrapper[4711]: I0123 08:36:27.319535 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-2fdmb"] Jan 23 08:36:27 crc kubenswrapper[4711]: I0123 08:36:27.447817 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-4sj65" event={"ID":"5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081","Type":"ContainerStarted","Data":"5b431b446d18948a07df5aa073f36103a06fdc747005f33df04fec0e8eda8fe8"} Jan 23 08:36:27 crc kubenswrapper[4711]: I0123 08:36:27.448270 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-4sj65" podUID="5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081" containerName="registry-server" containerID="cri-o://5b431b446d18948a07df5aa073f36103a06fdc747005f33df04fec0e8eda8fe8" gracePeriod=2 Jan 23 08:36:27 crc kubenswrapper[4711]: I0123 08:36:27.450177 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2fdmb" event={"ID":"eb73ab49-65f1-446a-960e-035803896d9a","Type":"ContainerStarted","Data":"b9a38f6b2f2278614f226a9c57a3b7212669d92ec33636e5604850c51cbdcab9"} Jan 23 08:36:27 crc kubenswrapper[4711]: I0123 08:36:27.470002 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-4sj65" podStartSLOduration=2.406182803 podStartE2EDuration="5.469987171s" podCreationTimestamp="2026-01-23 08:36:22 +0000 UTC" firstStartedPulling="2026-01-23 08:36:23.40958345 +0000 UTC m=+968.982539818" lastFinishedPulling="2026-01-23 08:36:26.473387818 +0000 UTC m=+972.046344186" observedRunningTime="2026-01-23 08:36:27.468109015 +0000 UTC m=+973.041065383" watchObservedRunningTime="2026-01-23 08:36:27.469987171 +0000 UTC m=+973.042943539" 
Jan 23 08:36:27 crc kubenswrapper[4711]: I0123 08:36:27.787272 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-4sj65" Jan 23 08:36:27 crc kubenswrapper[4711]: I0123 08:36:27.891012 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ckv2h\" (UniqueName: \"kubernetes.io/projected/5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081-kube-api-access-ckv2h\") pod \"5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081\" (UID: \"5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081\") " Jan 23 08:36:27 crc kubenswrapper[4711]: I0123 08:36:27.896057 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081-kube-api-access-ckv2h" (OuterVolumeSpecName: "kube-api-access-ckv2h") pod "5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081" (UID: "5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081"). InnerVolumeSpecName "kube-api-access-ckv2h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:36:27 crc kubenswrapper[4711]: I0123 08:36:27.992577 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ckv2h\" (UniqueName: \"kubernetes.io/projected/5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081-kube-api-access-ckv2h\") on node \"crc\" DevicePath \"\"" Jan 23 08:36:28 crc kubenswrapper[4711]: I0123 08:36:28.457227 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2fdmb" event={"ID":"eb73ab49-65f1-446a-960e-035803896d9a","Type":"ContainerStarted","Data":"a4fc5d78500bdc10b984a40bda02660a71a2730b2a84fb52e3844c312a72b198"} Jan 23 08:36:28 crc kubenswrapper[4711]: I0123 08:36:28.458902 4711 generic.go:334] "Generic (PLEG): container finished" podID="5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081" containerID="5b431b446d18948a07df5aa073f36103a06fdc747005f33df04fec0e8eda8fe8" exitCode=0 Jan 23 08:36:28 crc kubenswrapper[4711]: I0123 08:36:28.458968 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-4sj65" Jan 23 08:36:28 crc kubenswrapper[4711]: I0123 08:36:28.458974 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-4sj65" event={"ID":"5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081","Type":"ContainerDied","Data":"5b431b446d18948a07df5aa073f36103a06fdc747005f33df04fec0e8eda8fe8"} Jan 23 08:36:28 crc kubenswrapper[4711]: I0123 08:36:28.459032 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-4sj65" event={"ID":"5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081","Type":"ContainerDied","Data":"3ce137ed1dd487d73ff37fa03f39911bcbf33fd8cec20b0bb8f53ba1fd8de30e"} Jan 23 08:36:28 crc kubenswrapper[4711]: I0123 08:36:28.459056 4711 scope.go:117] "RemoveContainer" containerID="5b431b446d18948a07df5aa073f36103a06fdc747005f33df04fec0e8eda8fe8" Jan 23 08:36:28 crc kubenswrapper[4711]: I0123 08:36:28.479224 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-2fdmb" podStartSLOduration=2.295454347 podStartE2EDuration="2.479200612s" podCreationTimestamp="2026-01-23 08:36:26 +0000 UTC" firstStartedPulling="2026-01-23 08:36:27.331351829 +0000 UTC m=+972.904308187" lastFinishedPulling="2026-01-23 08:36:27.515098084 +0000 UTC m=+973.088054452" observedRunningTime="2026-01-23 08:36:28.472527688 +0000 UTC m=+974.045484056" watchObservedRunningTime="2026-01-23 08:36:28.479200612 +0000 UTC m=+974.052157000" Jan 23 08:36:28 crc kubenswrapper[4711]: I0123 08:36:28.483088 4711 scope.go:117] "RemoveContainer" containerID="5b431b446d18948a07df5aa073f36103a06fdc747005f33df04fec0e8eda8fe8" Jan 23 08:36:28 crc kubenswrapper[4711]: E0123 08:36:28.484999 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b431b446d18948a07df5aa073f36103a06fdc747005f33df04fec0e8eda8fe8\": container with ID starting with 5b431b446d18948a07df5aa073f36103a06fdc747005f33df04fec0e8eda8fe8 not found: ID does not exist" containerID="5b431b446d18948a07df5aa073f36103a06fdc747005f33df04fec0e8eda8fe8" Jan 23 08:36:28 crc kubenswrapper[4711]: I0123 08:36:28.485052 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b431b446d18948a07df5aa073f36103a06fdc747005f33df04fec0e8eda8fe8"} err="failed to get container status \"5b431b446d18948a07df5aa073f36103a06fdc747005f33df04fec0e8eda8fe8\": rpc error: code = NotFound desc = could not find container \"5b431b446d18948a07df5aa073f36103a06fdc747005f33df04fec0e8eda8fe8\": container with ID starting with 5b431b446d18948a07df5aa073f36103a06fdc747005f33df04fec0e8eda8fe8 not found: ID does not exist" Jan 23 08:36:28 crc kubenswrapper[4711]: I0123 08:36:28.490228 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-4sj65"] Jan 23 08:36:28 crc kubenswrapper[4711]: I0123 08:36:28.494746 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-4sj65"] Jan 23 08:36:29 crc kubenswrapper[4711]: I0123 08:36:29.482520 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081" path="/var/lib/kubelet/pods/5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081/volumes" Jan 23 08:36:36 crc kubenswrapper[4711]: I0123 08:36:36.940767 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack-operators/openstack-operator-index-2fdmb" Jan 23 08:36:36 crc kubenswrapper[4711]: I0123 08:36:36.941353 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-2fdmb" Jan 23 08:36:36 crc kubenswrapper[4711]: I0123 08:36:36.967625 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-2fdmb" Jan 23 08:36:37 crc kubenswrapper[4711]: I0123 08:36:37.535540 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-2fdmb" Jan 23 08:36:43 crc kubenswrapper[4711]: I0123 08:36:43.468983 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl"] Jan 23 08:36:43 crc kubenswrapper[4711]: E0123 08:36:43.470027 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081" containerName="registry-server" Jan 23 08:36:43 crc kubenswrapper[4711]: I0123 08:36:43.470048 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081" containerName="registry-server" Jan 23 08:36:43 crc kubenswrapper[4711]: I0123 08:36:43.470338 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f8764fa-6aa6-4a9f-aad4-17e6ffd2d081" containerName="registry-server" Jan 23 08:36:43 crc kubenswrapper[4711]: I0123 08:36:43.471784 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl" Jan 23 08:36:43 crc kubenswrapper[4711]: I0123 08:36:43.473988 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-n282r" Jan 23 08:36:43 crc kubenswrapper[4711]: I0123 08:36:43.481842 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl"] Jan 23 08:36:43 crc kubenswrapper[4711]: I0123 08:36:43.620548 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c1bb2b11-1e9e-4000-996b-8097bcc3a448-util\") pod \"e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl\" (UID: \"c1bb2b11-1e9e-4000-996b-8097bcc3a448\") " pod="openstack-operators/e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl" Jan 23 08:36:43 crc kubenswrapper[4711]: I0123 08:36:43.621749 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c1bb2b11-1e9e-4000-996b-8097bcc3a448-bundle\") pod \"e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl\" (UID: \"c1bb2b11-1e9e-4000-996b-8097bcc3a448\") " pod="openstack-operators/e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl" Jan 23 08:36:43 crc kubenswrapper[4711]: I0123 08:36:43.621846 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmztn\" (UniqueName: \"kubernetes.io/projected/c1bb2b11-1e9e-4000-996b-8097bcc3a448-kube-api-access-zmztn\") pod \"e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl\" (UID: \"c1bb2b11-1e9e-4000-996b-8097bcc3a448\") " pod="openstack-operators/e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl" Jan 23 08:36:43 crc kubenswrapper[4711]: I0123 
08:36:43.723350 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c1bb2b11-1e9e-4000-996b-8097bcc3a448-util\") pod \"e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl\" (UID: \"c1bb2b11-1e9e-4000-996b-8097bcc3a448\") " pod="openstack-operators/e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl" Jan 23 08:36:43 crc kubenswrapper[4711]: I0123 08:36:43.723417 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c1bb2b11-1e9e-4000-996b-8097bcc3a448-bundle\") pod \"e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl\" (UID: \"c1bb2b11-1e9e-4000-996b-8097bcc3a448\") " pod="openstack-operators/e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl" Jan 23 08:36:43 crc kubenswrapper[4711]: I0123 08:36:43.723449 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmztn\" (UniqueName: \"kubernetes.io/projected/c1bb2b11-1e9e-4000-996b-8097bcc3a448-kube-api-access-zmztn\") pod \"e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl\" (UID: \"c1bb2b11-1e9e-4000-996b-8097bcc3a448\") " pod="openstack-operators/e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl" Jan 23 08:36:43 crc kubenswrapper[4711]: I0123 08:36:43.723941 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c1bb2b11-1e9e-4000-996b-8097bcc3a448-bundle\") pod \"e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl\" (UID: \"c1bb2b11-1e9e-4000-996b-8097bcc3a448\") " pod="openstack-operators/e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl" Jan 23 08:36:43 crc kubenswrapper[4711]: I0123 08:36:43.724068 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c1bb2b11-1e9e-4000-996b-8097bcc3a448-util\") pod \"e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl\" (UID: \"c1bb2b11-1e9e-4000-996b-8097bcc3a448\") " pod="openstack-operators/e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl" Jan 23 08:36:43 crc kubenswrapper[4711]: I0123 08:36:43.740930 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmztn\" (UniqueName: \"kubernetes.io/projected/c1bb2b11-1e9e-4000-996b-8097bcc3a448-kube-api-access-zmztn\") pod \"e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl\" (UID: \"c1bb2b11-1e9e-4000-996b-8097bcc3a448\") " pod="openstack-operators/e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl" Jan 23 08:36:43 crc kubenswrapper[4711]: I0123 08:36:43.791861 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl" Jan 23 08:36:43 crc kubenswrapper[4711]: I0123 08:36:43.966890 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl"] Jan 23 08:36:44 crc kubenswrapper[4711]: I0123 08:36:44.557135 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl" event={"ID":"c1bb2b11-1e9e-4000-996b-8097bcc3a448","Type":"ContainerStarted","Data":"50368e9d4763d06454d06900b70a690f39e7d6c374840051dcdbbdc3cd495fe1"} Jan 23 08:36:46 crc kubenswrapper[4711]: I0123 08:36:46.531786 4711 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-g2w8q container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.19:8443/healthz\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 23 08:36:46 crc kubenswrapper[4711]: I0123 08:36:46.531884 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-g2w8q" podUID="df27a7ac-56ad-458a-8954-4177f65db5ac" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.19:8443/healthz\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 23 08:36:48 crc kubenswrapper[4711]: I0123 08:36:48.585769 4711 generic.go:334] "Generic (PLEG): container finished" podID="c1bb2b11-1e9e-4000-996b-8097bcc3a448" containerID="3b1708f48d707e0c71b5dba4565cb4801d0602cb9ce543b09e3d230d01f3508c" exitCode=0 Jan 23 08:36:48 crc kubenswrapper[4711]: I0123 08:36:48.585853 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl" event={"ID":"c1bb2b11-1e9e-4000-996b-8097bcc3a448","Type":"ContainerDied","Data":"3b1708f48d707e0c71b5dba4565cb4801d0602cb9ce543b09e3d230d01f3508c"} Jan 23 08:36:49 crc kubenswrapper[4711]: I0123 08:36:49.024433 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hx9s4"] Jan 23 08:36:49 crc kubenswrapper[4711]: I0123 08:36:49.027213 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hx9s4" Jan 23 08:36:49 crc kubenswrapper[4711]: I0123 08:36:49.035562 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hx9s4"] Jan 23 08:36:49 crc kubenswrapper[4711]: I0123 08:36:49.096965 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41085c31-2a7f-4717-8548-ff09637820e0-catalog-content\") pod \"community-operators-hx9s4\" (UID: \"41085c31-2a7f-4717-8548-ff09637820e0\") " pod="openshift-marketplace/community-operators-hx9s4" Jan 23 08:36:49 crc kubenswrapper[4711]: I0123 08:36:49.097056 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zlb6\" (UniqueName: \"kubernetes.io/projected/41085c31-2a7f-4717-8548-ff09637820e0-kube-api-access-7zlb6\") pod \"community-operators-hx9s4\" (UID: \"41085c31-2a7f-4717-8548-ff09637820e0\") " pod="openshift-marketplace/community-operators-hx9s4" Jan 23 08:36:49 crc kubenswrapper[4711]: I0123 08:36:49.097086 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41085c31-2a7f-4717-8548-ff09637820e0-utilities\") pod \"community-operators-hx9s4\" (UID: \"41085c31-2a7f-4717-8548-ff09637820e0\") " pod="openshift-marketplace/community-operators-hx9s4" Jan 23 08:36:49 crc kubenswrapper[4711]: I0123 08:36:49.197907 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41085c31-2a7f-4717-8548-ff09637820e0-catalog-content\") pod \"community-operators-hx9s4\" (UID: \"41085c31-2a7f-4717-8548-ff09637820e0\") " pod="openshift-marketplace/community-operators-hx9s4" Jan 23 08:36:49 crc kubenswrapper[4711]: I0123 08:36:49.198001 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zlb6\" (UniqueName: \"kubernetes.io/projected/41085c31-2a7f-4717-8548-ff09637820e0-kube-api-access-7zlb6\") pod \"community-operators-hx9s4\" (UID: \"41085c31-2a7f-4717-8548-ff09637820e0\") " pod="openshift-marketplace/community-operators-hx9s4" Jan 23 08:36:49 crc kubenswrapper[4711]: I0123 08:36:49.198032 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41085c31-2a7f-4717-8548-ff09637820e0-utilities\") pod \"community-operators-hx9s4\" (UID: \"41085c31-2a7f-4717-8548-ff09637820e0\") " pod="openshift-marketplace/community-operators-hx9s4" Jan 23 08:36:49 crc kubenswrapper[4711]: I0123 08:36:49.198368 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41085c31-2a7f-4717-8548-ff09637820e0-catalog-content\") pod \"community-operators-hx9s4\" (UID: \"41085c31-2a7f-4717-8548-ff09637820e0\") " pod="openshift-marketplace/community-operators-hx9s4" Jan 23 08:36:49 crc kubenswrapper[4711]: I0123 08:36:49.198443 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41085c31-2a7f-4717-8548-ff09637820e0-utilities\") pod \"community-operators-hx9s4\" (UID: \"41085c31-2a7f-4717-8548-ff09637820e0\") " pod="openshift-marketplace/community-operators-hx9s4" Jan 23 08:36:49 crc kubenswrapper[4711]: I0123 08:36:49.216645 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7zlb6\" (UniqueName: \"kubernetes.io/projected/41085c31-2a7f-4717-8548-ff09637820e0-kube-api-access-7zlb6\") pod \"community-operators-hx9s4\" (UID: \"41085c31-2a7f-4717-8548-ff09637820e0\") " pod="openshift-marketplace/community-operators-hx9s4" Jan 23 08:36:49 crc kubenswrapper[4711]: I0123 08:36:49.375313 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hx9s4" Jan 23 08:36:49 crc kubenswrapper[4711]: I0123 08:36:49.594417 4711 generic.go:334] "Generic (PLEG): container finished" podID="c1bb2b11-1e9e-4000-996b-8097bcc3a448" containerID="e3e0d55dd21dcb60fabc6f1d5cca991e09dfdb8780e551aeb64dba04782d8691" exitCode=0 Jan 23 08:36:49 crc kubenswrapper[4711]: I0123 08:36:49.594528 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl" event={"ID":"c1bb2b11-1e9e-4000-996b-8097bcc3a448","Type":"ContainerDied","Data":"e3e0d55dd21dcb60fabc6f1d5cca991e09dfdb8780e551aeb64dba04782d8691"} Jan 23 08:36:49 crc kubenswrapper[4711]: I0123 08:36:49.596026 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hx9s4"] Jan 23 08:36:50 crc kubenswrapper[4711]: I0123 08:36:50.602718 4711 generic.go:334] "Generic (PLEG): container finished" podID="c1bb2b11-1e9e-4000-996b-8097bcc3a448" containerID="709cf35dcfbb415d9c7c00dc70858c6c89b22ae829d32f95be471a341bb17ceb" exitCode=0 Jan 23 08:36:50 crc kubenswrapper[4711]: I0123 08:36:50.602785 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl" event={"ID":"c1bb2b11-1e9e-4000-996b-8097bcc3a448","Type":"ContainerDied","Data":"709cf35dcfbb415d9c7c00dc70858c6c89b22ae829d32f95be471a341bb17ceb"} Jan 23 08:36:50 crc kubenswrapper[4711]: I0123 08:36:50.605851 4711 generic.go:334] "Generic (PLEG): container finished" podID="41085c31-2a7f-4717-8548-ff09637820e0" containerID="91386158a6dc7a96ebb3654d14fb2ef888255333c8df45cd9cabafcf5ffd2d97" exitCode=0 Jan 23 08:36:50 crc kubenswrapper[4711]: I0123 08:36:50.605886 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hx9s4" event={"ID":"41085c31-2a7f-4717-8548-ff09637820e0","Type":"ContainerDied","Data":"91386158a6dc7a96ebb3654d14fb2ef888255333c8df45cd9cabafcf5ffd2d97"} Jan 23 08:36:50 crc kubenswrapper[4711]: I0123 08:36:50.605913 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hx9s4" event={"ID":"41085c31-2a7f-4717-8548-ff09637820e0","Type":"ContainerStarted","Data":"4ddf184876ae6fa718dc0beaf45c057d6e1b07514a7a2842632575802ff73553"} Jan 23 08:36:51 crc kubenswrapper[4711]: I0123 08:36:51.845173 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl" Jan 23 08:36:51 crc kubenswrapper[4711]: I0123 08:36:51.934335 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c1bb2b11-1e9e-4000-996b-8097bcc3a448-util\") pod \"c1bb2b11-1e9e-4000-996b-8097bcc3a448\" (UID: \"c1bb2b11-1e9e-4000-996b-8097bcc3a448\") " Jan 23 08:36:51 crc kubenswrapper[4711]: I0123 08:36:51.934373 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmztn\" (UniqueName: \"kubernetes.io/projected/c1bb2b11-1e9e-4000-996b-8097bcc3a448-kube-api-access-zmztn\") pod \"c1bb2b11-1e9e-4000-996b-8097bcc3a448\" (UID: \"c1bb2b11-1e9e-4000-996b-8097bcc3a448\") " Jan 23 08:36:51 crc kubenswrapper[4711]: I0123 08:36:51.934416 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c1bb2b11-1e9e-4000-996b-8097bcc3a448-bundle\") pod \"c1bb2b11-1e9e-4000-996b-8097bcc3a448\" (UID: \"c1bb2b11-1e9e-4000-996b-8097bcc3a448\") " Jan 23 08:36:51 crc kubenswrapper[4711]: I0123 08:36:51.935150 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1bb2b11-1e9e-4000-996b-8097bcc3a448-bundle" (OuterVolumeSpecName: "bundle") pod "c1bb2b11-1e9e-4000-996b-8097bcc3a448" (UID: "c1bb2b11-1e9e-4000-996b-8097bcc3a448"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:36:51 crc kubenswrapper[4711]: I0123 08:36:51.940799 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1bb2b11-1e9e-4000-996b-8097bcc3a448-kube-api-access-zmztn" (OuterVolumeSpecName: "kube-api-access-zmztn") pod "c1bb2b11-1e9e-4000-996b-8097bcc3a448" (UID: "c1bb2b11-1e9e-4000-996b-8097bcc3a448"). InnerVolumeSpecName "kube-api-access-zmztn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:36:51 crc kubenswrapper[4711]: I0123 08:36:51.953408 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1bb2b11-1e9e-4000-996b-8097bcc3a448-util" (OuterVolumeSpecName: "util") pod "c1bb2b11-1e9e-4000-996b-8097bcc3a448" (UID: "c1bb2b11-1e9e-4000-996b-8097bcc3a448"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:36:52 crc kubenswrapper[4711]: I0123 08:36:52.035650 4711 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c1bb2b11-1e9e-4000-996b-8097bcc3a448-bundle\") on node \"crc\" DevicePath \"\"" Jan 23 08:36:52 crc kubenswrapper[4711]: I0123 08:36:52.035715 4711 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c1bb2b11-1e9e-4000-996b-8097bcc3a448-util\") on node \"crc\" DevicePath \"\"" Jan 23 08:36:52 crc kubenswrapper[4711]: I0123 08:36:52.035729 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmztn\" (UniqueName: \"kubernetes.io/projected/c1bb2b11-1e9e-4000-996b-8097bcc3a448-kube-api-access-zmztn\") on node \"crc\" DevicePath \"\"" Jan 23 08:36:52 crc kubenswrapper[4711]: I0123 08:36:52.619042 4711 generic.go:334] "Generic (PLEG): container finished" podID="41085c31-2a7f-4717-8548-ff09637820e0" containerID="660f98d6640ace7c3079ca363ccd45a301ea9ff992652037b87e651381f04f60" exitCode=0 Jan 23 08:36:52 crc kubenswrapper[4711]: I0123 08:36:52.619159 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hx9s4" event={"ID":"41085c31-2a7f-4717-8548-ff09637820e0","Type":"ContainerDied","Data":"660f98d6640ace7c3079ca363ccd45a301ea9ff992652037b87e651381f04f60"} Jan 23 08:36:52 crc kubenswrapper[4711]: I0123 08:36:52.665778 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl" event={"ID":"c1bb2b11-1e9e-4000-996b-8097bcc3a448","Type":"ContainerDied","Data":"50368e9d4763d06454d06900b70a690f39e7d6c374840051dcdbbdc3cd495fe1"} Jan 23 08:36:52 crc kubenswrapper[4711]: I0123 08:36:52.665827 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="50368e9d4763d06454d06900b70a690f39e7d6c374840051dcdbbdc3cd495fe1" Jan 23 08:36:52 crc kubenswrapper[4711]: I0123 08:36:52.665916 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl" Jan 23 08:36:53 crc kubenswrapper[4711]: I0123 08:36:53.675398 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hx9s4" event={"ID":"41085c31-2a7f-4717-8548-ff09637820e0","Type":"ContainerStarted","Data":"3391f7bf5532e24227f11f6c810dfa6647fb4c12f704a3843d17a48b37ccf34d"} Jan 23 08:36:53 crc kubenswrapper[4711]: I0123 08:36:53.695343 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hx9s4" podStartSLOduration=2.162654707 podStartE2EDuration="4.69532657s" podCreationTimestamp="2026-01-23 08:36:49 +0000 UTC" firstStartedPulling="2026-01-23 08:36:50.607800791 +0000 UTC m=+996.180757179" lastFinishedPulling="2026-01-23 08:36:53.140472674 +0000 UTC m=+998.713429042" observedRunningTime="2026-01-23 08:36:53.693457313 +0000 UTC m=+999.266413681" watchObservedRunningTime="2026-01-23 08:36:53.69532657 +0000 UTC m=+999.268282938" Jan 23 08:36:54 crc kubenswrapper[4711]: I0123 08:36:54.673304 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-59f56ff984-tx7z9"] Jan 23 08:36:54 crc kubenswrapper[4711]: E0123 08:36:54.673632 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1bb2b11-1e9e-4000-996b-8097bcc3a448" containerName="util" Jan 23 08:36:54 crc kubenswrapper[4711]: I0123 08:36:54.673652 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1bb2b11-1e9e-4000-996b-8097bcc3a448" containerName="util" Jan 23 08:36:54 crc kubenswrapper[4711]: E0123 08:36:54.673668 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1bb2b11-1e9e-4000-996b-8097bcc3a448" containerName="extract" Jan 23 08:36:54 crc kubenswrapper[4711]: I0123 08:36:54.673676 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1bb2b11-1e9e-4000-996b-8097bcc3a448" containerName="extract" Jan 23 08:36:54 crc kubenswrapper[4711]: E0123 08:36:54.673693 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1bb2b11-1e9e-4000-996b-8097bcc3a448" containerName="pull" Jan 23 08:36:54 crc kubenswrapper[4711]: I0123 08:36:54.673702 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1bb2b11-1e9e-4000-996b-8097bcc3a448" containerName="pull" Jan 23 08:36:54 crc kubenswrapper[4711]: I0123 08:36:54.673844 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1bb2b11-1e9e-4000-996b-8097bcc3a448" containerName="extract" Jan 23 08:36:54 crc kubenswrapper[4711]: I0123 08:36:54.674293 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-59f56ff984-tx7z9" Jan 23 08:36:54 crc kubenswrapper[4711]: I0123 08:36:54.676280 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-ntd5t" Jan 23 08:36:54 crc kubenswrapper[4711]: I0123 08:36:54.691156 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-59f56ff984-tx7z9"] Jan 23 08:36:54 crc kubenswrapper[4711]: I0123 08:36:54.791863 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nnk9\" (UniqueName: \"kubernetes.io/projected/b302d05c-2499-4bf0-a271-29e930bf8c0d-kube-api-access-7nnk9\") pod \"openstack-operator-controller-init-59f56ff984-tx7z9\" (UID: \"b302d05c-2499-4bf0-a271-29e930bf8c0d\") " pod="openstack-operators/openstack-operator-controller-init-59f56ff984-tx7z9" Jan 23 08:36:54 crc kubenswrapper[4711]: I0123 08:36:54.893025 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nnk9\" (UniqueName: \"kubernetes.io/projected/b302d05c-2499-4bf0-a271-29e930bf8c0d-kube-api-access-7nnk9\") pod \"openstack-operator-controller-init-59f56ff984-tx7z9\" (UID: \"b302d05c-2499-4bf0-a271-29e930bf8c0d\") " pod="openstack-operators/openstack-operator-controller-init-59f56ff984-tx7z9" Jan 23 08:36:54 crc kubenswrapper[4711]: I0123 08:36:54.911564 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nnk9\" (UniqueName: \"kubernetes.io/projected/b302d05c-2499-4bf0-a271-29e930bf8c0d-kube-api-access-7nnk9\") pod \"openstack-operator-controller-init-59f56ff984-tx7z9\" (UID: \"b302d05c-2499-4bf0-a271-29e930bf8c0d\") " pod="openstack-operators/openstack-operator-controller-init-59f56ff984-tx7z9" Jan 23 08:36:54 crc kubenswrapper[4711]: I0123 08:36:54.992687 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-59f56ff984-tx7z9" Jan 23 08:36:55 crc kubenswrapper[4711]: I0123 08:36:55.429963 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-59f56ff984-tx7z9"] Jan 23 08:36:55 crc kubenswrapper[4711]: W0123 08:36:55.434331 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb302d05c_2499_4bf0_a271_29e930bf8c0d.slice/crio-35ac33e007b854449daf2cb00ec9217bf04c93fa53d983d95b5102bb3a04daf3 WatchSource:0}: Error finding container 35ac33e007b854449daf2cb00ec9217bf04c93fa53d983d95b5102bb3a04daf3: Status 404 returned error can't find the container with id 35ac33e007b854449daf2cb00ec9217bf04c93fa53d983d95b5102bb3a04daf3 Jan 23 08:36:55 crc kubenswrapper[4711]: I0123 08:36:55.688389 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-59f56ff984-tx7z9" event={"ID":"b302d05c-2499-4bf0-a271-29e930bf8c0d","Type":"ContainerStarted","Data":"35ac33e007b854449daf2cb00ec9217bf04c93fa53d983d95b5102bb3a04daf3"} Jan 23 08:36:55 crc kubenswrapper[4711]: I0123 08:36:55.993777 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:36:55 crc kubenswrapper[4711]: I0123 08:36:55.993826 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:36:59 crc kubenswrapper[4711]: I0123 08:36:59.376112 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hx9s4" Jan 23 08:36:59 crc kubenswrapper[4711]: I0123 08:36:59.376793 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hx9s4" Jan 23 08:36:59 crc kubenswrapper[4711]: I0123 08:36:59.426335 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hx9s4" Jan 23 08:36:59 crc kubenswrapper[4711]: I0123 08:36:59.776370 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hx9s4" Jan 23 08:37:01 crc kubenswrapper[4711]: I0123 08:37:01.745541 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-59f56ff984-tx7z9" event={"ID":"b302d05c-2499-4bf0-a271-29e930bf8c0d","Type":"ContainerStarted","Data":"d2ccbbe104b0fe806d52327b913d12d3b9aa75d81463ae164030c697def9b341"} Jan 23 08:37:01 crc kubenswrapper[4711]: I0123 08:37:01.745920 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-59f56ff984-tx7z9" Jan 23 08:37:01 crc kubenswrapper[4711]: I0123 08:37:01.782458 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-59f56ff984-tx7z9" podStartSLOduration=2.541384171 podStartE2EDuration="7.782433947s" 
podCreationTimestamp="2026-01-23 08:36:54 +0000 UTC" firstStartedPulling="2026-01-23 08:36:55.436607191 +0000 UTC m=+1001.009563549" lastFinishedPulling="2026-01-23 08:37:00.677656937 +0000 UTC m=+1006.250613325" observedRunningTime="2026-01-23 08:37:01.779441792 +0000 UTC m=+1007.352398180" watchObservedRunningTime="2026-01-23 08:37:01.782433947 +0000 UTC m=+1007.355390315" Jan 23 08:37:01 crc kubenswrapper[4711]: I0123 08:37:01.809945 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hx9s4"] Jan 23 08:37:01 crc kubenswrapper[4711]: I0123 08:37:01.810170 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hx9s4" podUID="41085c31-2a7f-4717-8548-ff09637820e0" containerName="registry-server" containerID="cri-o://3391f7bf5532e24227f11f6c810dfa6647fb4c12f704a3843d17a48b37ccf34d" gracePeriod=2 Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.191387 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hx9s4" Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.212541 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41085c31-2a7f-4717-8548-ff09637820e0-utilities\") pod \"41085c31-2a7f-4717-8548-ff09637820e0\" (UID: \"41085c31-2a7f-4717-8548-ff09637820e0\") " Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.212659 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7zlb6\" (UniqueName: \"kubernetes.io/projected/41085c31-2a7f-4717-8548-ff09637820e0-kube-api-access-7zlb6\") pod \"41085c31-2a7f-4717-8548-ff09637820e0\" (UID: \"41085c31-2a7f-4717-8548-ff09637820e0\") " Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.212700 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41085c31-2a7f-4717-8548-ff09637820e0-catalog-content\") pod \"41085c31-2a7f-4717-8548-ff09637820e0\" (UID: \"41085c31-2a7f-4717-8548-ff09637820e0\") " Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.213558 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41085c31-2a7f-4717-8548-ff09637820e0-utilities" (OuterVolumeSpecName: "utilities") pod "41085c31-2a7f-4717-8548-ff09637820e0" (UID: "41085c31-2a7f-4717-8548-ff09637820e0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.218237 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41085c31-2a7f-4717-8548-ff09637820e0-kube-api-access-7zlb6" (OuterVolumeSpecName: "kube-api-access-7zlb6") pod "41085c31-2a7f-4717-8548-ff09637820e0" (UID: "41085c31-2a7f-4717-8548-ff09637820e0"). InnerVolumeSpecName "kube-api-access-7zlb6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.267062 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41085c31-2a7f-4717-8548-ff09637820e0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "41085c31-2a7f-4717-8548-ff09637820e0" (UID: "41085c31-2a7f-4717-8548-ff09637820e0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.313613 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7zlb6\" (UniqueName: \"kubernetes.io/projected/41085c31-2a7f-4717-8548-ff09637820e0-kube-api-access-7zlb6\") on node \"crc\" DevicePath \"\"" Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.313650 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41085c31-2a7f-4717-8548-ff09637820e0-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.313660 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41085c31-2a7f-4717-8548-ff09637820e0-utilities\") on node \"crc\" DevicePath \"\"" Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.753608 4711 generic.go:334] "Generic (PLEG): container finished" podID="41085c31-2a7f-4717-8548-ff09637820e0" containerID="3391f7bf5532e24227f11f6c810dfa6647fb4c12f704a3843d17a48b37ccf34d" exitCode=0 Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.754192 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hx9s4" Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.757734 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hx9s4" event={"ID":"41085c31-2a7f-4717-8548-ff09637820e0","Type":"ContainerDied","Data":"3391f7bf5532e24227f11f6c810dfa6647fb4c12f704a3843d17a48b37ccf34d"} Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.757775 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hx9s4" event={"ID":"41085c31-2a7f-4717-8548-ff09637820e0","Type":"ContainerDied","Data":"4ddf184876ae6fa718dc0beaf45c057d6e1b07514a7a2842632575802ff73553"} Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.757797 4711 scope.go:117] "RemoveContainer" containerID="3391f7bf5532e24227f11f6c810dfa6647fb4c12f704a3843d17a48b37ccf34d" Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.777398 4711 scope.go:117] "RemoveContainer" containerID="660f98d6640ace7c3079ca363ccd45a301ea9ff992652037b87e651381f04f60" Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.781709 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hx9s4"] Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.788750 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hx9s4"] Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.801680 4711 scope.go:117] "RemoveContainer" containerID="91386158a6dc7a96ebb3654d14fb2ef888255333c8df45cd9cabafcf5ffd2d97" Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.815499 4711 scope.go:117] "RemoveContainer" containerID="3391f7bf5532e24227f11f6c810dfa6647fb4c12f704a3843d17a48b37ccf34d" Jan 23 08:37:02 crc kubenswrapper[4711]: E0123 08:37:02.815937 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3391f7bf5532e24227f11f6c810dfa6647fb4c12f704a3843d17a48b37ccf34d\": container with ID starting with 3391f7bf5532e24227f11f6c810dfa6647fb4c12f704a3843d17a48b37ccf34d not found: ID does not exist" containerID="3391f7bf5532e24227f11f6c810dfa6647fb4c12f704a3843d17a48b37ccf34d" Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.815984 
4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3391f7bf5532e24227f11f6c810dfa6647fb4c12f704a3843d17a48b37ccf34d"} err="failed to get container status \"3391f7bf5532e24227f11f6c810dfa6647fb4c12f704a3843d17a48b37ccf34d\": rpc error: code = NotFound desc = could not find container \"3391f7bf5532e24227f11f6c810dfa6647fb4c12f704a3843d17a48b37ccf34d\": container with ID starting with 3391f7bf5532e24227f11f6c810dfa6647fb4c12f704a3843d17a48b37ccf34d not found: ID does not exist" Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.816021 4711 scope.go:117] "RemoveContainer" containerID="660f98d6640ace7c3079ca363ccd45a301ea9ff992652037b87e651381f04f60" Jan 23 08:37:02 crc kubenswrapper[4711]: E0123 08:37:02.816279 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"660f98d6640ace7c3079ca363ccd45a301ea9ff992652037b87e651381f04f60\": container with ID starting with 660f98d6640ace7c3079ca363ccd45a301ea9ff992652037b87e651381f04f60 not found: ID does not exist" containerID="660f98d6640ace7c3079ca363ccd45a301ea9ff992652037b87e651381f04f60" Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.816310 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"660f98d6640ace7c3079ca363ccd45a301ea9ff992652037b87e651381f04f60"} err="failed to get container status \"660f98d6640ace7c3079ca363ccd45a301ea9ff992652037b87e651381f04f60\": rpc error: code = NotFound desc = could not find container \"660f98d6640ace7c3079ca363ccd45a301ea9ff992652037b87e651381f04f60\": container with ID starting with 660f98d6640ace7c3079ca363ccd45a301ea9ff992652037b87e651381f04f60 not found: ID does not exist" Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.816334 4711 scope.go:117] "RemoveContainer" containerID="91386158a6dc7a96ebb3654d14fb2ef888255333c8df45cd9cabafcf5ffd2d97" Jan 23 08:37:02 crc kubenswrapper[4711]: E0123 08:37:02.816718 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91386158a6dc7a96ebb3654d14fb2ef888255333c8df45cd9cabafcf5ffd2d97\": container with ID starting with 91386158a6dc7a96ebb3654d14fb2ef888255333c8df45cd9cabafcf5ffd2d97 not found: ID does not exist" containerID="91386158a6dc7a96ebb3654d14fb2ef888255333c8df45cd9cabafcf5ffd2d97" Jan 23 08:37:02 crc kubenswrapper[4711]: I0123 08:37:02.816744 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91386158a6dc7a96ebb3654d14fb2ef888255333c8df45cd9cabafcf5ffd2d97"} err="failed to get container status \"91386158a6dc7a96ebb3654d14fb2ef888255333c8df45cd9cabafcf5ffd2d97\": rpc error: code = NotFound desc = could not find container \"91386158a6dc7a96ebb3654d14fb2ef888255333c8df45cd9cabafcf5ffd2d97\": container with ID starting with 91386158a6dc7a96ebb3654d14fb2ef888255333c8df45cd9cabafcf5ffd2d97 not found: ID does not exist" Jan 23 08:37:03 crc kubenswrapper[4711]: I0123 08:37:03.483780 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41085c31-2a7f-4717-8548-ff09637820e0" path="/var/lib/kubelet/pods/41085c31-2a7f-4717-8548-ff09637820e0/volumes" Jan 23 08:37:14 crc kubenswrapper[4711]: I0123 08:37:14.995458 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-59f56ff984-tx7z9" Jan 23 08:37:25 crc kubenswrapper[4711]: I0123 08:37:25.994064 4711 
patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:37:25 crc kubenswrapper[4711]: I0123 08:37:25.994629 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.289652 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-gw4s9"] Jan 23 08:37:34 crc kubenswrapper[4711]: E0123 08:37:34.290461 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41085c31-2a7f-4717-8548-ff09637820e0" containerName="extract-content" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.290475 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="41085c31-2a7f-4717-8548-ff09637820e0" containerName="extract-content" Jan 23 08:37:34 crc kubenswrapper[4711]: E0123 08:37:34.290497 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41085c31-2a7f-4717-8548-ff09637820e0" containerName="extract-utilities" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.290527 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="41085c31-2a7f-4717-8548-ff09637820e0" containerName="extract-utilities" Jan 23 08:37:34 crc kubenswrapper[4711]: E0123 08:37:34.290542 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41085c31-2a7f-4717-8548-ff09637820e0" containerName="registry-server" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.290548 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="41085c31-2a7f-4717-8548-ff09637820e0" containerName="registry-server" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.290652 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="41085c31-2a7f-4717-8548-ff09637820e0" containerName="registry-server" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.291046 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-gw4s9" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.293065 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-qbvw2" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.301649 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-69cf5d4557-kjjf2"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.302357 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-kjjf2" Jan 23 08:37:34 crc kubenswrapper[4711]: W0123 08:37:34.304825 4711 reflector.go:561] object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-xbhhw": failed to list *v1.Secret: secrets "cinder-operator-controller-manager-dockercfg-xbhhw" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack-operators": no relationship found between node 'crc' and this object Jan 23 08:37:34 crc kubenswrapper[4711]: E0123 08:37:34.304903 4711 reflector.go:158] "Unhandled Error" err="object-\"openstack-operators\"/\"cinder-operator-controller-manager-dockercfg-xbhhw\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"cinder-operator-controller-manager-dockercfg-xbhhw\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.322414 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-gw4s9"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.361425 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-b45d7bf98-4pw4h"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.363907 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-4pw4h" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.382485 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-8fmfn" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.427708 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-69cf5d4557-kjjf2"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.440408 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xzt2\" (UniqueName: \"kubernetes.io/projected/a7d6e419-04ce-4f5c-93a9-34d14a8c531a-kube-api-access-7xzt2\") pod \"barbican-operator-controller-manager-59dd8b7cbf-gw4s9\" (UID: \"a7d6e419-04ce-4f5c-93a9-34d14a8c531a\") " pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-gw4s9" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.440502 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2p4wg\" (UniqueName: \"kubernetes.io/projected/0c4bb18f-fc6b-49ea-a9c3-971c666a935b-kube-api-access-2p4wg\") pod \"cinder-operator-controller-manager-69cf5d4557-kjjf2\" (UID: \"0c4bb18f-fc6b-49ea-a9c3-971c666a935b\") " pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-kjjf2" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.447362 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-sk67x"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.448318 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-sk67x" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.451906 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-7lhww" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.453412 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-78fdd796fd-62nhb"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.454267 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-62nhb" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.458645 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-6grmf" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.461700 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-b45d7bf98-4pw4h"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.478158 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-v7tnr"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.478914 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-v7tnr" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.483280 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-xn5sj" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.485384 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-78fdd796fd-62nhb"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.495771 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-ghlgc"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.496932 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-ghlgc" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.500074 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-lt42q" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.502734 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-v7tnr"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.510894 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-sk67x"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.526875 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-ghlgc"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.540234 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-b8b6d4659-9fsww"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.541804 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xzt2\" (UniqueName: \"kubernetes.io/projected/a7d6e419-04ce-4f5c-93a9-34d14a8c531a-kube-api-access-7xzt2\") pod \"barbican-operator-controller-manager-59dd8b7cbf-gw4s9\" (UID: \"a7d6e419-04ce-4f5c-93a9-34d14a8c531a\") " pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-gw4s9" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.542027 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2p4wg\" (UniqueName: \"kubernetes.io/projected/0c4bb18f-fc6b-49ea-a9c3-971c666a935b-kube-api-access-2p4wg\") pod \"cinder-operator-controller-manager-69cf5d4557-kjjf2\" (UID: \"0c4bb18f-fc6b-49ea-a9c3-971c666a935b\") " pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-kjjf2" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.542827 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzxbd\" (UniqueName: \"kubernetes.io/projected/c6c6b995-fa92-4cf2-87a1-361881e8c284-kube-api-access-nzxbd\") pod \"designate-operator-controller-manager-b45d7bf98-4pw4h\" (UID: \"c6c6b995-fa92-4cf2-87a1-361881e8c284\") " pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-4pw4h" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.541976 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-9fsww" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.548109 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-mr86x" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.555024 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.555960 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.564311 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.564520 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-78c6999f6f-8ptr2"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.564715 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-gdz2f" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.565488 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-8ptr2" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.568277 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-vnsdt" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.572668 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.580281 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-b8b6d4659-9fsww"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.584991 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2p4wg\" (UniqueName: \"kubernetes.io/projected/0c4bb18f-fc6b-49ea-a9c3-971c666a935b-kube-api-access-2p4wg\") pod \"cinder-operator-controller-manager-69cf5d4557-kjjf2\" (UID: \"0c4bb18f-fc6b-49ea-a9c3-971c666a935b\") " pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-kjjf2" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.594253 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-78c6999f6f-8ptr2"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.603528 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xzt2\" (UniqueName: \"kubernetes.io/projected/a7d6e419-04ce-4f5c-93a9-34d14a8c531a-kube-api-access-7xzt2\") pod \"barbican-operator-controller-manager-59dd8b7cbf-gw4s9\" (UID: \"a7d6e419-04ce-4f5c-93a9-34d14a8c531a\") " pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-gw4s9" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.613821 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-c87fff755-bjrrb"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.614875 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bjrrb" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.616540 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5d8f59fb49-42kx2"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.616607 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-vnv9r" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.617840 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-42kx2" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.623177 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-bh5b6" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.634799 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-gw4s9" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.638612 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5d8f59fb49-42kx2"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.644234 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kspv2\" (UniqueName: \"kubernetes.io/projected/7c54a5ef-3d58-4010-875b-8b6022692c7e-kube-api-access-kspv2\") pod \"heat-operator-controller-manager-594c8c9d5d-sk67x\" (UID: \"7c54a5ef-3d58-4010-875b-8b6022692c7e\") " pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-sk67x" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.644799 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5ls4\" (UniqueName: \"kubernetes.io/projected/f57c2bcd-cd26-420c-a3f9-64b5d4d1a916-kube-api-access-j5ls4\") pod \"keystone-operator-controller-manager-b8b6d4659-9fsww\" (UID: \"f57c2bcd-cd26-420c-a3f9-64b5d4d1a916\") " pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-9fsww" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.644888 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzxbd\" (UniqueName: \"kubernetes.io/projected/c6c6b995-fa92-4cf2-87a1-361881e8c284-kube-api-access-nzxbd\") pod \"designate-operator-controller-manager-b45d7bf98-4pw4h\" (UID: \"c6c6b995-fa92-4cf2-87a1-361881e8c284\") " pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-4pw4h" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.644923 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76czs\" (UniqueName: \"kubernetes.io/projected/cb3cde58-59aa-41dc-a4f1-8fadd07dd1ed-kube-api-access-76czs\") pod \"horizon-operator-controller-manager-77d5c5b54f-v7tnr\" (UID: \"cb3cde58-59aa-41dc-a4f1-8fadd07dd1ed\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-v7tnr" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.644950 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdmxs\" (UniqueName: \"kubernetes.io/projected/5ad48aae-ab84-4c93-9d0e-cb4fd24884dd-kube-api-access-bdmxs\") pod \"ironic-operator-controller-manager-69d6c9f5b8-ghlgc\" (UID: \"5ad48aae-ab84-4c93-9d0e-cb4fd24884dd\") " pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-ghlgc" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.644971 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sc77l\" (UniqueName: \"kubernetes.io/projected/e609d803-23cf-4d04-8587-bdd492c4c4bd-kube-api-access-sc77l\") pod \"glance-operator-controller-manager-78fdd796fd-62nhb\" (UID: \"e609d803-23cf-4d04-8587-bdd492c4c4bd\") " 
pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-62nhb" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.658295 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7bd9774b6-cmn6r"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.668798 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-cmn6r" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.673322 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-mvc9b" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.680681 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-c87fff755-bjrrb"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.682587 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzxbd\" (UniqueName: \"kubernetes.io/projected/c6c6b995-fa92-4cf2-87a1-361881e8c284-kube-api-access-nzxbd\") pod \"designate-operator-controller-manager-b45d7bf98-4pw4h\" (UID: \"c6c6b995-fa92-4cf2-87a1-361881e8c284\") " pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-4pw4h" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.687027 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.695452 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.702994 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-4nnzp" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.713049 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-4pw4h" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.717292 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7bd9774b6-cmn6r"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.724380 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-55db956ddc-plrlz"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.725358 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-plrlz" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.731112 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-qkncp" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.741347 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.746686 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76czs\" (UniqueName: \"kubernetes.io/projected/cb3cde58-59aa-41dc-a4f1-8fadd07dd1ed-kube-api-access-76czs\") pod \"horizon-operator-controller-manager-77d5c5b54f-v7tnr\" (UID: \"cb3cde58-59aa-41dc-a4f1-8fadd07dd1ed\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-v7tnr" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.746734 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdmxs\" (UniqueName: \"kubernetes.io/projected/5ad48aae-ab84-4c93-9d0e-cb4fd24884dd-kube-api-access-bdmxs\") pod \"ironic-operator-controller-manager-69d6c9f5b8-ghlgc\" (UID: \"5ad48aae-ab84-4c93-9d0e-cb4fd24884dd\") " pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-ghlgc" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.746761 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sc77l\" (UniqueName: \"kubernetes.io/projected/e609d803-23cf-4d04-8587-bdd492c4c4bd-kube-api-access-sc77l\") pod \"glance-operator-controller-manager-78fdd796fd-62nhb\" (UID: \"e609d803-23cf-4d04-8587-bdd492c4c4bd\") " pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-62nhb" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.746797 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kspv2\" (UniqueName: \"kubernetes.io/projected/7c54a5ef-3d58-4010-875b-8b6022692c7e-kube-api-access-kspv2\") pod \"heat-operator-controller-manager-594c8c9d5d-sk67x\" (UID: \"7c54a5ef-3d58-4010-875b-8b6022692c7e\") " pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-sk67x" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.746843 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5ls4\" (UniqueName: \"kubernetes.io/projected/f57c2bcd-cd26-420c-a3f9-64b5d4d1a916-kube-api-access-j5ls4\") pod \"keystone-operator-controller-manager-b8b6d4659-9fsww\" (UID: \"f57c2bcd-cd26-420c-a3f9-64b5d4d1a916\") " pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-9fsww" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.746876 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4sbk\" (UniqueName: \"kubernetes.io/projected/467b6d38-a02c-44f9-81bf-3bda90dc4efd-kube-api-access-f4sbk\") pod \"infra-operator-controller-manager-54ccf4f85d-vs5n6\" (UID: \"467b6d38-a02c-44f9-81bf-3bda90dc4efd\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.746911 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4dc9\" (UniqueName: 
\"kubernetes.io/projected/bfc822a0-472c-4e41-99c9-35605ebea5c6-kube-api-access-d4dc9\") pod \"mariadb-operator-controller-manager-c87fff755-bjrrb\" (UID: \"bfc822a0-472c-4e41-99c9-35605ebea5c6\") " pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bjrrb" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.746934 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/467b6d38-a02c-44f9-81bf-3bda90dc4efd-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-vs5n6\" (UID: \"467b6d38-a02c-44f9-81bf-3bda90dc4efd\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.746980 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhrcd\" (UniqueName: \"kubernetes.io/projected/b764e900-9f5f-49e6-b6a8-8ad55007cc54-kube-api-access-nhrcd\") pod \"neutron-operator-controller-manager-5d8f59fb49-42kx2\" (UID: \"b764e900-9f5f-49e6-b6a8-8ad55007cc54\") " pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-42kx2" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.747009 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nn8bh\" (UniqueName: \"kubernetes.io/projected/effbd1c1-9a1a-4a5c-9955-6a1005746383-kube-api-access-nn8bh\") pod \"manila-operator-controller-manager-78c6999f6f-8ptr2\" (UID: \"effbd1c1-9a1a-4a5c-9955-6a1005746383\") " pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-8ptr2" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.751735 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-55db956ddc-plrlz"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.769559 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.770386 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.775151 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.775753 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-jcqxc" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.777530 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kspv2\" (UniqueName: \"kubernetes.io/projected/7c54a5ef-3d58-4010-875b-8b6022692c7e-kube-api-access-kspv2\") pod \"heat-operator-controller-manager-594c8c9d5d-sk67x\" (UID: \"7c54a5ef-3d58-4010-875b-8b6022692c7e\") " pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-sk67x" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.803552 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdmxs\" (UniqueName: \"kubernetes.io/projected/5ad48aae-ab84-4c93-9d0e-cb4fd24884dd-kube-api-access-bdmxs\") pod \"ironic-operator-controller-manager-69d6c9f5b8-ghlgc\" (UID: \"5ad48aae-ab84-4c93-9d0e-cb4fd24884dd\") " pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-ghlgc" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.815299 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sc77l\" (UniqueName: \"kubernetes.io/projected/e609d803-23cf-4d04-8587-bdd492c4c4bd-kube-api-access-sc77l\") pod \"glance-operator-controller-manager-78fdd796fd-62nhb\" (UID: \"e609d803-23cf-4d04-8587-bdd492c4c4bd\") " pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-62nhb" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.816315 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5ls4\" (UniqueName: \"kubernetes.io/projected/f57c2bcd-cd26-420c-a3f9-64b5d4d1a916-kube-api-access-j5ls4\") pod \"keystone-operator-controller-manager-b8b6d4659-9fsww\" (UID: \"f57c2bcd-cd26-420c-a3f9-64b5d4d1a916\") " pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-9fsww" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.821458 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76czs\" (UniqueName: \"kubernetes.io/projected/cb3cde58-59aa-41dc-a4f1-8fadd07dd1ed-kube-api-access-76czs\") pod \"horizon-operator-controller-manager-77d5c5b54f-v7tnr\" (UID: \"cb3cde58-59aa-41dc-a4f1-8fadd07dd1ed\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-v7tnr" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.822056 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-ghlgc" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.835841 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.851752 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpzpp\" (UniqueName: \"kubernetes.io/projected/20db23eb-eb12-458a-9c9e-164f8e3bcab7-kube-api-access-lpzpp\") pod \"nova-operator-controller-manager-5db5449586-d5x8p\" (UID: \"20db23eb-eb12-458a-9c9e-164f8e3bcab7\") " pod="openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.852128 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4sbk\" (UniqueName: \"kubernetes.io/projected/467b6d38-a02c-44f9-81bf-3bda90dc4efd-kube-api-access-f4sbk\") pod \"infra-operator-controller-manager-54ccf4f85d-vs5n6\" (UID: \"467b6d38-a02c-44f9-81bf-3bda90dc4efd\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.852165 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4dc9\" (UniqueName: \"kubernetes.io/projected/bfc822a0-472c-4e41-99c9-35605ebea5c6-kube-api-access-d4dc9\") pod \"mariadb-operator-controller-manager-c87fff755-bjrrb\" (UID: \"bfc822a0-472c-4e41-99c9-35605ebea5c6\") " pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bjrrb" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.852186 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/467b6d38-a02c-44f9-81bf-3bda90dc4efd-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-vs5n6\" (UID: \"467b6d38-a02c-44f9-81bf-3bda90dc4efd\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.852209 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bh27\" (UniqueName: \"kubernetes.io/projected/f1b9f385-b045-407d-a56c-87750c1c5972-kube-api-access-5bh27\") pod \"octavia-operator-controller-manager-7bd9774b6-cmn6r\" (UID: \"f1b9f385-b045-407d-a56c-87750c1c5972\") " pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-cmn6r" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.852245 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bn5r8\" (UniqueName: \"kubernetes.io/projected/0b81979e-a44a-40d2-8eff-958e528d95a1-kube-api-access-bn5r8\") pod \"ovn-operator-controller-manager-55db956ddc-plrlz\" (UID: \"0b81979e-a44a-40d2-8eff-958e528d95a1\") " pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-plrlz" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.852270 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhrcd\" (UniqueName: \"kubernetes.io/projected/b764e900-9f5f-49e6-b6a8-8ad55007cc54-kube-api-access-nhrcd\") pod \"neutron-operator-controller-manager-5d8f59fb49-42kx2\" (UID: \"b764e900-9f5f-49e6-b6a8-8ad55007cc54\") " pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-42kx2" Jan 23 08:37:34 crc 
kubenswrapper[4711]: I0123 08:37:34.852293 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nn8bh\" (UniqueName: \"kubernetes.io/projected/effbd1c1-9a1a-4a5c-9955-6a1005746383-kube-api-access-nn8bh\") pod \"manila-operator-controller-manager-78c6999f6f-8ptr2\" (UID: \"effbd1c1-9a1a-4a5c-9955-6a1005746383\") " pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-8ptr2" Jan 23 08:37:34 crc kubenswrapper[4711]: E0123 08:37:34.852973 4711 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 23 08:37:34 crc kubenswrapper[4711]: E0123 08:37:34.853017 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/467b6d38-a02c-44f9-81bf-3bda90dc4efd-cert podName:467b6d38-a02c-44f9-81bf-3bda90dc4efd nodeName:}" failed. No retries permitted until 2026-01-23 08:37:35.353002156 +0000 UTC m=+1040.925958514 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/467b6d38-a02c-44f9-81bf-3bda90dc4efd-cert") pod "infra-operator-controller-manager-54ccf4f85d-vs5n6" (UID: "467b6d38-a02c-44f9-81bf-3bda90dc4efd") : secret "infra-operator-webhook-server-cert" not found Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.896241 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-9fsww" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.901155 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-547cbdb99f-4lhtg"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.902040 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-4lhtg" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.906268 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-kzrmm" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.918826 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-5d646b7d76-ctqxt"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.919376 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4dc9\" (UniqueName: \"kubernetes.io/projected/bfc822a0-472c-4e41-99c9-35605ebea5c6-kube-api-access-d4dc9\") pod \"mariadb-operator-controller-manager-c87fff755-bjrrb\" (UID: \"bfc822a0-472c-4e41-99c9-35605ebea5c6\") " pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bjrrb" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.922532 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-ctqxt" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.926887 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-fwjq9" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.935322 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-547cbdb99f-4lhtg"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.939371 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhrcd\" (UniqueName: \"kubernetes.io/projected/b764e900-9f5f-49e6-b6a8-8ad55007cc54-kube-api-access-nhrcd\") pod \"neutron-operator-controller-manager-5d8f59fb49-42kx2\" (UID: \"b764e900-9f5f-49e6-b6a8-8ad55007cc54\") " pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-42kx2" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.944175 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nn8bh\" (UniqueName: \"kubernetes.io/projected/effbd1c1-9a1a-4a5c-9955-6a1005746383-kube-api-access-nn8bh\") pod \"manila-operator-controller-manager-78c6999f6f-8ptr2\" (UID: \"effbd1c1-9a1a-4a5c-9955-6a1005746383\") " pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-8ptr2" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.944186 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4sbk\" (UniqueName: \"kubernetes.io/projected/467b6d38-a02c-44f9-81bf-3bda90dc4efd-kube-api-access-f4sbk\") pod \"infra-operator-controller-manager-54ccf4f85d-vs5n6\" (UID: \"467b6d38-a02c-44f9-81bf-3bda90dc4efd\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.953158 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-8ptr2" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.954250 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bn5r8\" (UniqueName: \"kubernetes.io/projected/0b81979e-a44a-40d2-8eff-958e528d95a1-kube-api-access-bn5r8\") pod \"ovn-operator-controller-manager-55db956ddc-plrlz\" (UID: \"0b81979e-a44a-40d2-8eff-958e528d95a1\") " pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-plrlz" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.954395 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e275a9a3-3a29-498a-bea9-b545730a0301-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854q774f\" (UID: \"e275a9a3-3a29-498a-bea9-b545730a0301\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.954609 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpzpp\" (UniqueName: \"kubernetes.io/projected/20db23eb-eb12-458a-9c9e-164f8e3bcab7-kube-api-access-lpzpp\") pod \"nova-operator-controller-manager-5db5449586-d5x8p\" (UID: \"20db23eb-eb12-458a-9c9e-164f8e3bcab7\") " pod="openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.954843 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scl8c\" (UniqueName: \"kubernetes.io/projected/e275a9a3-3a29-498a-bea9-b545730a0301-kube-api-access-scl8c\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854q774f\" (UID: \"e275a9a3-3a29-498a-bea9-b545730a0301\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.954963 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bh27\" (UniqueName: \"kubernetes.io/projected/f1b9f385-b045-407d-a56c-87750c1c5972-kube-api-access-5bh27\") pod \"octavia-operator-controller-manager-7bd9774b6-cmn6r\" (UID: \"f1b9f385-b045-407d-a56c-87750c1c5972\") " pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-cmn6r" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.955523 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5d646b7d76-ctqxt"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.988187 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-85cd9769bb-qnzz6"] Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.989217 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-qnzz6" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.993982 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-46fsl" Jan 23 08:37:34 crc kubenswrapper[4711]: I0123 08:37:34.995689 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bn5r8\" (UniqueName: \"kubernetes.io/projected/0b81979e-a44a-40d2-8eff-958e528d95a1-kube-api-access-bn5r8\") pod \"ovn-operator-controller-manager-55db956ddc-plrlz\" (UID: \"0b81979e-a44a-40d2-8eff-958e528d95a1\") " pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-plrlz" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.002365 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-85cd9769bb-qnzz6"] Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.017327 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bh27\" (UniqueName: \"kubernetes.io/projected/f1b9f385-b045-407d-a56c-87750c1c5972-kube-api-access-5bh27\") pod \"octavia-operator-controller-manager-7bd9774b6-cmn6r\" (UID: \"f1b9f385-b045-407d-a56c-87750c1c5972\") " pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-cmn6r" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.019819 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpzpp\" (UniqueName: \"kubernetes.io/projected/20db23eb-eb12-458a-9c9e-164f8e3bcab7-kube-api-access-lpzpp\") pod \"nova-operator-controller-manager-5db5449586-d5x8p\" (UID: \"20db23eb-eb12-458a-9c9e-164f8e3bcab7\") " pod="openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.023618 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-69797bbcbd-6wgwv"] Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.024779 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-6wgwv" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.027461 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-plrlz" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.035623 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bjrrb" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.035866 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-zmqkv" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.044705 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-42kx2" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.055758 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-69797bbcbd-6wgwv"] Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.058528 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scl8c\" (UniqueName: \"kubernetes.io/projected/e275a9a3-3a29-498a-bea9-b545730a0301-kube-api-access-scl8c\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854q774f\" (UID: \"e275a9a3-3a29-498a-bea9-b545730a0301\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.058782 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wb79s\" (UniqueName: \"kubernetes.io/projected/366f8b54-0e7e-4d75-9c62-d174624512e4-kube-api-access-wb79s\") pod \"swift-operator-controller-manager-547cbdb99f-4lhtg\" (UID: \"366f8b54-0e7e-4d75-9c62-d174624512e4\") " pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-4lhtg" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.058920 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csr5q\" (UniqueName: \"kubernetes.io/projected/9eb50a74-06d4-4b43-a0b9-245354b3cde7-kube-api-access-csr5q\") pod \"placement-operator-controller-manager-5d646b7d76-ctqxt\" (UID: \"9eb50a74-06d4-4b43-a0b9-245354b3cde7\") " pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-ctqxt" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.059035 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e275a9a3-3a29-498a-bea9-b545730a0301-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854q774f\" (UID: \"e275a9a3-3a29-498a-bea9-b545730a0301\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f" Jan 23 08:37:35 crc kubenswrapper[4711]: E0123 08:37:35.059233 4711 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 23 08:37:35 crc kubenswrapper[4711]: E0123 08:37:35.059344 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e275a9a3-3a29-498a-bea9-b545730a0301-cert podName:e275a9a3-3a29-498a-bea9-b545730a0301 nodeName:}" failed. No retries permitted until 2026-01-23 08:37:35.559327854 +0000 UTC m=+1041.132284222 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e275a9a3-3a29-498a-bea9-b545730a0301-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854q774f" (UID: "e275a9a3-3a29-498a-bea9-b545730a0301") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.071023 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-sk67x" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.081732 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5ffb9c6597-lvcpb"] Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.082745 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-lvcpb" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.095175 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-62nhb" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.100136 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5ffb9c6597-lvcpb"] Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.102980 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-c5bph" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.114369 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-v7tnr" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.115930 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-cmn6r" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.122709 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scl8c\" (UniqueName: \"kubernetes.io/projected/e275a9a3-3a29-498a-bea9-b545730a0301-kube-api-access-scl8c\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854q774f\" (UID: \"e275a9a3-3a29-498a-bea9-b545730a0301\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.166088 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xz49f\" (UniqueName: \"kubernetes.io/projected/a1ec006d-63fd-4fac-b5f5-df222bab8638-kube-api-access-xz49f\") pod \"test-operator-controller-manager-69797bbcbd-6wgwv\" (UID: \"a1ec006d-63fd-4fac-b5f5-df222bab8638\") " pod="openstack-operators/test-operator-controller-manager-69797bbcbd-6wgwv" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.166341 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wb79s\" (UniqueName: \"kubernetes.io/projected/366f8b54-0e7e-4d75-9c62-d174624512e4-kube-api-access-wb79s\") pod \"swift-operator-controller-manager-547cbdb99f-4lhtg\" (UID: \"366f8b54-0e7e-4d75-9c62-d174624512e4\") " pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-4lhtg" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.166467 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csr5q\" (UniqueName: \"kubernetes.io/projected/9eb50a74-06d4-4b43-a0b9-245354b3cde7-kube-api-access-csr5q\") pod \"placement-operator-controller-manager-5d646b7d76-ctqxt\" (UID: \"9eb50a74-06d4-4b43-a0b9-245354b3cde7\") " pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-ctqxt" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.166624 4711 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlgh7\" (UniqueName: \"kubernetes.io/projected/793a9ab1-c243-4a56-9463-3be8147bff44-kube-api-access-tlgh7\") pod \"telemetry-operator-controller-manager-85cd9769bb-qnzz6\" (UID: \"793a9ab1-c243-4a56-9463-3be8147bff44\") " pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-qnzz6" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.166730 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6qpd\" (UniqueName: \"kubernetes.io/projected/458cb03b-3e35-4219-8009-08829d99da25-kube-api-access-n6qpd\") pod \"watcher-operator-controller-manager-5ffb9c6597-lvcpb\" (UID: \"458cb03b-3e35-4219-8009-08829d99da25\") " pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-lvcpb" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.202309 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t"] Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.203852 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.218034 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csr5q\" (UniqueName: \"kubernetes.io/projected/9eb50a74-06d4-4b43-a0b9-245354b3cde7-kube-api-access-csr5q\") pod \"placement-operator-controller-manager-5d646b7d76-ctqxt\" (UID: \"9eb50a74-06d4-4b43-a0b9-245354b3cde7\") " pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-ctqxt" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.228355 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wb79s\" (UniqueName: \"kubernetes.io/projected/366f8b54-0e7e-4d75-9c62-d174624512e4-kube-api-access-wb79s\") pod \"swift-operator-controller-manager-547cbdb99f-4lhtg\" (UID: \"366f8b54-0e7e-4d75-9c62-d174624512e4\") " pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-4lhtg" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.233065 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.233337 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.233579 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-wnwds" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.239287 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t"] Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.272827 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlgh7\" (UniqueName: \"kubernetes.io/projected/793a9ab1-c243-4a56-9463-3be8147bff44-kube-api-access-tlgh7\") pod \"telemetry-operator-controller-manager-85cd9769bb-qnzz6\" (UID: \"793a9ab1-c243-4a56-9463-3be8147bff44\") " pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-qnzz6" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.272937 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-n6qpd\" (UniqueName: \"kubernetes.io/projected/458cb03b-3e35-4219-8009-08829d99da25-kube-api-access-n6qpd\") pod \"watcher-operator-controller-manager-5ffb9c6597-lvcpb\" (UID: \"458cb03b-3e35-4219-8009-08829d99da25\") " pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-lvcpb" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.273045 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xz49f\" (UniqueName: \"kubernetes.io/projected/a1ec006d-63fd-4fac-b5f5-df222bab8638-kube-api-access-xz49f\") pod \"test-operator-controller-manager-69797bbcbd-6wgwv\" (UID: \"a1ec006d-63fd-4fac-b5f5-df222bab8638\") " pod="openstack-operators/test-operator-controller-manager-69797bbcbd-6wgwv" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.274321 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.312445 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlgh7\" (UniqueName: \"kubernetes.io/projected/793a9ab1-c243-4a56-9463-3be8147bff44-kube-api-access-tlgh7\") pod \"telemetry-operator-controller-manager-85cd9769bb-qnzz6\" (UID: \"793a9ab1-c243-4a56-9463-3be8147bff44\") " pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-qnzz6" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.327914 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-gw4s9"] Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.331698 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xz49f\" (UniqueName: \"kubernetes.io/projected/a1ec006d-63fd-4fac-b5f5-df222bab8638-kube-api-access-xz49f\") pod \"test-operator-controller-manager-69797bbcbd-6wgwv\" (UID: \"a1ec006d-63fd-4fac-b5f5-df222bab8638\") " pod="openstack-operators/test-operator-controller-manager-69797bbcbd-6wgwv" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.338191 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6qpd\" (UniqueName: \"kubernetes.io/projected/458cb03b-3e35-4219-8009-08829d99da25-kube-api-access-n6qpd\") pod \"watcher-operator-controller-manager-5ffb9c6597-lvcpb\" (UID: \"458cb03b-3e35-4219-8009-08829d99da25\") " pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-lvcpb" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.359132 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-4lhtg" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.376902 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/467b6d38-a02c-44f9-81bf-3bda90dc4efd-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-vs5n6\" (UID: \"467b6d38-a02c-44f9-81bf-3bda90dc4efd\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.376985 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-metrics-certs\") pod \"openstack-operator-controller-manager-6998f5c585-t278t\" (UID: \"72674af2-3b9e-47e6-8417-bee428fe826a\") " pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.377055 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5l9mr\" (UniqueName: \"kubernetes.io/projected/72674af2-3b9e-47e6-8417-bee428fe826a-kube-api-access-5l9mr\") pod \"openstack-operator-controller-manager-6998f5c585-t278t\" (UID: \"72674af2-3b9e-47e6-8417-bee428fe826a\") " pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.377090 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-webhook-certs\") pod \"openstack-operator-controller-manager-6998f5c585-t278t\" (UID: \"72674af2-3b9e-47e6-8417-bee428fe826a\") " pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:37:35 crc kubenswrapper[4711]: E0123 08:37:35.377226 4711 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 23 08:37:35 crc kubenswrapper[4711]: E0123 08:37:35.377285 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/467b6d38-a02c-44f9-81bf-3bda90dc4efd-cert podName:467b6d38-a02c-44f9-81bf-3bda90dc4efd nodeName:}" failed. No retries permitted until 2026-01-23 08:37:36.377263872 +0000 UTC m=+1041.950220240 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/467b6d38-a02c-44f9-81bf-3bda90dc4efd-cert") pod "infra-operator-controller-manager-54ccf4f85d-vs5n6" (UID: "467b6d38-a02c-44f9-81bf-3bda90dc4efd") : secret "infra-operator-webhook-server-cert" not found Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.383673 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hwttv"] Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.385330 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hwttv" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.390069 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-wm7hg" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.390810 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-xbhhw" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.390964 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-kjjf2" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.398022 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-ctqxt" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.406408 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hwttv"] Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.415415 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.451236 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-qnzz6" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.451670 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-6wgwv" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.485811 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-metrics-certs\") pod \"openstack-operator-controller-manager-6998f5c585-t278t\" (UID: \"72674af2-3b9e-47e6-8417-bee428fe826a\") " pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.486262 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bw685\" (UniqueName: \"kubernetes.io/projected/5726b969-f5cb-4e58-9e8f-92c001f4a7be-kube-api-access-bw685\") pod \"rabbitmq-cluster-operator-manager-668c99d594-hwttv\" (UID: \"5726b969-f5cb-4e58-9e8f-92c001f4a7be\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hwttv" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.486464 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5l9mr\" (UniqueName: \"kubernetes.io/projected/72674af2-3b9e-47e6-8417-bee428fe826a-kube-api-access-5l9mr\") pod \"openstack-operator-controller-manager-6998f5c585-t278t\" (UID: \"72674af2-3b9e-47e6-8417-bee428fe826a\") " pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.486620 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-webhook-certs\") pod \"openstack-operator-controller-manager-6998f5c585-t278t\" (UID: \"72674af2-3b9e-47e6-8417-bee428fe826a\") " 
pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:37:35 crc kubenswrapper[4711]: E0123 08:37:35.487109 4711 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 23 08:37:35 crc kubenswrapper[4711]: E0123 08:37:35.491126 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-webhook-certs podName:72674af2-3b9e-47e6-8417-bee428fe826a nodeName:}" failed. No retries permitted until 2026-01-23 08:37:35.991100748 +0000 UTC m=+1041.564057116 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-webhook-certs") pod "openstack-operator-controller-manager-6998f5c585-t278t" (UID: "72674af2-3b9e-47e6-8417-bee428fe826a") : secret "webhook-server-cert" not found Jan 23 08:37:35 crc kubenswrapper[4711]: E0123 08:37:35.488943 4711 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 23 08:37:35 crc kubenswrapper[4711]: E0123 08:37:35.491841 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-metrics-certs podName:72674af2-3b9e-47e6-8417-bee428fe826a nodeName:}" failed. No retries permitted until 2026-01-23 08:37:35.991829515 +0000 UTC m=+1041.564785883 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-metrics-certs") pod "openstack-operator-controller-manager-6998f5c585-t278t" (UID: "72674af2-3b9e-47e6-8417-bee428fe826a") : secret "metrics-server-cert" not found Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.493910 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-lvcpb" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.571605 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5l9mr\" (UniqueName: \"kubernetes.io/projected/72674af2-3b9e-47e6-8417-bee428fe826a-kube-api-access-5l9mr\") pod \"openstack-operator-controller-manager-6998f5c585-t278t\" (UID: \"72674af2-3b9e-47e6-8417-bee428fe826a\") " pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.588151 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bw685\" (UniqueName: \"kubernetes.io/projected/5726b969-f5cb-4e58-9e8f-92c001f4a7be-kube-api-access-bw685\") pod \"rabbitmq-cluster-operator-manager-668c99d594-hwttv\" (UID: \"5726b969-f5cb-4e58-9e8f-92c001f4a7be\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hwttv" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.588290 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e275a9a3-3a29-498a-bea9-b545730a0301-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854q774f\" (UID: \"e275a9a3-3a29-498a-bea9-b545730a0301\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f" Jan 23 08:37:35 crc kubenswrapper[4711]: E0123 08:37:35.588989 4711 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 23 08:37:35 crc kubenswrapper[4711]: E0123 08:37:35.589053 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e275a9a3-3a29-498a-bea9-b545730a0301-cert podName:e275a9a3-3a29-498a-bea9-b545730a0301 nodeName:}" failed. No retries permitted until 2026-01-23 08:37:36.589034985 +0000 UTC m=+1042.161991353 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e275a9a3-3a29-498a-bea9-b545730a0301-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854q774f" (UID: "e275a9a3-3a29-498a-bea9-b545730a0301") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.599366 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-b45d7bf98-4pw4h"] Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.649858 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bw685\" (UniqueName: \"kubernetes.io/projected/5726b969-f5cb-4e58-9e8f-92c001f4a7be-kube-api-access-bw685\") pod \"rabbitmq-cluster-operator-manager-668c99d594-hwttv\" (UID: \"5726b969-f5cb-4e58-9e8f-92c001f4a7be\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hwttv" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.874970 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hwttv" Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.994124 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-4pw4h" event={"ID":"c6c6b995-fa92-4cf2-87a1-361881e8c284","Type":"ContainerStarted","Data":"f5d2688ecc1939175b855c660788e282dd10bd3504b2e55aecde741452100548"} Jan 23 08:37:35 crc kubenswrapper[4711]: I0123 08:37:35.995411 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-gw4s9" event={"ID":"a7d6e419-04ce-4f5c-93a9-34d14a8c531a","Type":"ContainerStarted","Data":"2a9089023bb661e46784fd015e5906ddd9f1e8e0abf0cffd920462ade8261c71"} Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.001722 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-metrics-certs\") pod \"openstack-operator-controller-manager-6998f5c585-t278t\" (UID: \"72674af2-3b9e-47e6-8417-bee428fe826a\") " pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.001825 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-webhook-certs\") pod \"openstack-operator-controller-manager-6998f5c585-t278t\" (UID: \"72674af2-3b9e-47e6-8417-bee428fe826a\") " pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:37:36 crc kubenswrapper[4711]: E0123 08:37:36.002009 4711 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 23 08:37:36 crc kubenswrapper[4711]: E0123 08:37:36.002097 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-webhook-certs podName:72674af2-3b9e-47e6-8417-bee428fe826a nodeName:}" failed. No retries permitted until 2026-01-23 08:37:37.00207961 +0000 UTC m=+1042.575035988 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-webhook-certs") pod "openstack-operator-controller-manager-6998f5c585-t278t" (UID: "72674af2-3b9e-47e6-8417-bee428fe826a") : secret "webhook-server-cert" not found Jan 23 08:37:36 crc kubenswrapper[4711]: E0123 08:37:36.002540 4711 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 23 08:37:36 crc kubenswrapper[4711]: E0123 08:37:36.002581 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-metrics-certs podName:72674af2-3b9e-47e6-8417-bee428fe826a nodeName:}" failed. No retries permitted until 2026-01-23 08:37:37.002571042 +0000 UTC m=+1042.575527400 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-metrics-certs") pod "openstack-operator-controller-manager-6998f5c585-t278t" (UID: "72674af2-3b9e-47e6-8417-bee428fe826a") : secret "metrics-server-cert" not found Jan 23 08:37:36 crc kubenswrapper[4711]: E0123 08:37:36.408381 4711 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 23 08:37:36 crc kubenswrapper[4711]: E0123 08:37:36.408459 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/467b6d38-a02c-44f9-81bf-3bda90dc4efd-cert podName:467b6d38-a02c-44f9-81bf-3bda90dc4efd nodeName:}" failed. No retries permitted until 2026-01-23 08:37:38.408440622 +0000 UTC m=+1043.981396990 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/467b6d38-a02c-44f9-81bf-3bda90dc4efd-cert") pod "infra-operator-controller-manager-54ccf4f85d-vs5n6" (UID: "467b6d38-a02c-44f9-81bf-3bda90dc4efd") : secret "infra-operator-webhook-server-cert" not found Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.408892 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/467b6d38-a02c-44f9-81bf-3bda90dc4efd-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-vs5n6\" (UID: \"467b6d38-a02c-44f9-81bf-3bda90dc4efd\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6" Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.441725 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-b8b6d4659-9fsww"] Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.451800 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-c87fff755-bjrrb"] Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.462586 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-78c6999f6f-8ptr2"] Jan 23 08:37:36 crc kubenswrapper[4711]: W0123 08:37:36.471749 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbfc822a0_472c_4e41_99c9_35605ebea5c6.slice/crio-72aecb39297a7c702947bd7cbcca907fc717690c0e8e9bd5199a0f1bc9a9f973 WatchSource:0}: Error finding container 72aecb39297a7c702947bd7cbcca907fc717690c0e8e9bd5199a0f1bc9a9f973: Status 404 returned error can't find the container with id 72aecb39297a7c702947bd7cbcca907fc717690c0e8e9bd5199a0f1bc9a9f973 Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.488589 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-55db956ddc-plrlz"] Jan 23 08:37:36 crc kubenswrapper[4711]: W0123 08:37:36.502988 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b81979e_a44a_40d2_8eff_958e528d95a1.slice/crio-a58c6c62997b4437ac82e879718a858238bc8fb80d3ca616abf01662ac771301 WatchSource:0}: Error finding container a58c6c62997b4437ac82e879718a858238bc8fb80d3ca616abf01662ac771301: Status 404 returned error can't find the container with id a58c6c62997b4437ac82e879718a858238bc8fb80d3ca616abf01662ac771301 Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.518312 4711 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-ghlgc"] Jan 23 08:37:36 crc kubenswrapper[4711]: W0123 08:37:36.521092 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5ad48aae_ab84_4c93_9d0e_cb4fd24884dd.slice/crio-2bdc1e653036996bfbc6772962fda4775366806a05bfb82abfc86fd71d2ad2f8 WatchSource:0}: Error finding container 2bdc1e653036996bfbc6772962fda4775366806a05bfb82abfc86fd71d2ad2f8: Status 404 returned error can't find the container with id 2bdc1e653036996bfbc6772962fda4775366806a05bfb82abfc86fd71d2ad2f8 Jan 23 08:37:36 crc kubenswrapper[4711]: W0123 08:37:36.540397 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c54a5ef_3d58_4010_875b_8b6022692c7e.slice/crio-6cb54b8018aeca32466ee46ff9c53fec90e87a2a2fa7c6b1dee23e2b37a8013a WatchSource:0}: Error finding container 6cb54b8018aeca32466ee46ff9c53fec90e87a2a2fa7c6b1dee23e2b37a8013a: Status 404 returned error can't find the container with id 6cb54b8018aeca32466ee46ff9c53fec90e87a2a2fa7c6b1dee23e2b37a8013a Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.543656 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-v7tnr"] Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.552754 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7bd9774b6-cmn6r"] Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.558442 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-sk67x"] Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.611383 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e275a9a3-3a29-498a-bea9-b545730a0301-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854q774f\" (UID: \"e275a9a3-3a29-498a-bea9-b545730a0301\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f" Jan 23 08:37:36 crc kubenswrapper[4711]: E0123 08:37:36.611542 4711 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 23 08:37:36 crc kubenswrapper[4711]: E0123 08:37:36.611693 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e275a9a3-3a29-498a-bea9-b545730a0301-cert podName:e275a9a3-3a29-498a-bea9-b545730a0301 nodeName:}" failed. No retries permitted until 2026-01-23 08:37:38.611673885 +0000 UTC m=+1044.184630253 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e275a9a3-3a29-498a-bea9-b545730a0301-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854q774f" (UID: "e275a9a3-3a29-498a-bea9-b545730a0301") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.668765 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-69cf5d4557-kjjf2"] Jan 23 08:37:36 crc kubenswrapper[4711]: W0123 08:37:36.675957 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c4bb18f_fc6b_49ea_a9c3_971c666a935b.slice/crio-74d078bd609ce690124c029ba861ac263d1085f0f955c551d0eba5d4fd3e9e78 WatchSource:0}: Error finding container 74d078bd609ce690124c029ba861ac263d1085f0f955c551d0eba5d4fd3e9e78: Status 404 returned error can't find the container with id 74d078bd609ce690124c029ba861ac263d1085f0f955c551d0eba5d4fd3e9e78 Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.859307 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5d646b7d76-ctqxt"] Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.865287 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hwttv"] Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.875458 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5ffb9c6597-lvcpb"] Jan 23 08:37:36 crc kubenswrapper[4711]: W0123 08:37:36.875643 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5726b969_f5cb_4e58_9e8f_92c001f4a7be.slice/crio-a5ee41ee86fd8cd2b1ac9df08050f527e29ac583d4e4249379cceabb757814f5 WatchSource:0}: Error finding container a5ee41ee86fd8cd2b1ac9df08050f527e29ac583d4e4249379cceabb757814f5: Status 404 returned error can't find the container with id a5ee41ee86fd8cd2b1ac9df08050f527e29ac583d4e4249379cceabb757814f5 Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.879465 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p"] Jan 23 08:37:36 crc kubenswrapper[4711]: E0123 08:37:36.888297 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bw685,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-hwttv_openstack-operators(5726b969-f5cb-4e58-9e8f-92c001f4a7be): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 23 08:37:36 crc kubenswrapper[4711]: E0123 08:37:36.889681 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hwttv" podUID="5726b969-f5cb-4e58-9e8f-92c001f4a7be" Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.896934 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-85cd9769bb-qnzz6"] Jan 23 08:37:36 crc kubenswrapper[4711]: W0123 08:37:36.901444 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod366f8b54_0e7e_4d75_9c62_d174624512e4.slice/crio-350a5d69544ca8dfa47b7ad19100d853b9a282279839ab514e277ada3f9a1a42 WatchSource:0}: Error finding container 350a5d69544ca8dfa47b7ad19100d853b9a282279839ab514e277ada3f9a1a42: Status 404 returned error can't find the container with id 350a5d69544ca8dfa47b7ad19100d853b9a282279839ab514e277ada3f9a1a42 Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.904828 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-78fdd796fd-62nhb"] Jan 23 08:37:36 crc kubenswrapper[4711]: E0123 08:37:36.907721 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wb79s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-547cbdb99f-4lhtg_openstack-operators(366f8b54-0e7e-4d75-9c62-d174624512e4): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 23 08:37:36 crc kubenswrapper[4711]: E0123 08:37:36.908796 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-4lhtg" podUID="366f8b54-0e7e-4d75-9c62-d174624512e4" Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.913976 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-547cbdb99f-4lhtg"] Jan 23 08:37:36 crc kubenswrapper[4711]: W0123 08:37:36.918082 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod793a9ab1_c243_4a56_9463_3be8147bff44.slice/crio-ab47b3b11e52a42753ea41c629ec4cb736636b2a024e611bf9ff8326119f5c9b WatchSource:0}: Error finding container ab47b3b11e52a42753ea41c629ec4cb736636b2a024e611bf9ff8326119f5c9b: Status 404 returned error can't find the container with id ab47b3b11e52a42753ea41c629ec4cb736636b2a024e611bf9ff8326119f5c9b Jan 23 08:37:36 crc kubenswrapper[4711]: E0123 08:37:36.918282 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:9caae9b3ee328df678baa26454e45e47693acdadb27f9c635680597aaec43337,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} 
BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sc77l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-78fdd796fd-62nhb_openstack-operators(e609d803-23cf-4d04-8587-bdd492c4c4bd): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 23 08:37:36 crc kubenswrapper[4711]: E0123 08:37:36.919556 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-62nhb" podUID="e609d803-23cf-4d04-8587-bdd492c4c4bd" Jan 23 08:37:36 crc kubenswrapper[4711]: E0123 08:37:36.923032 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:e02722d7581bfe1c5fc13e2fa6811d8665102ba86635c77547abf6b933cde127,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tlgh7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-85cd9769bb-qnzz6_openstack-operators(793a9ab1-c243-4a56-9463-3be8147bff44): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.923558 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-69797bbcbd-6wgwv"] Jan 23 08:37:36 crc kubenswrapper[4711]: E0123 08:37:36.924139 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-qnzz6" podUID="793a9ab1-c243-4a56-9463-3be8147bff44" Jan 23 08:37:36 crc kubenswrapper[4711]: E0123 08:37:36.924140 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:c8dde42dafd41026ed2e4cfc26efc0fff63c4ba9d31326ae7dc644ccceaafa9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xz49f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-69797bbcbd-6wgwv_openstack-operators(a1ec006d-63fd-4fac-b5f5-df222bab8638): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 23 08:37:36 crc kubenswrapper[4711]: W0123 08:37:36.924492 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb764e900_9f5f_49e6_b6a8_8ad55007cc54.slice/crio-49ebaca64020f4b8336f8237663838c1d10fffe946f536cb10a180fa0cc735e2 WatchSource:0}: Error finding container 49ebaca64020f4b8336f8237663838c1d10fffe946f536cb10a180fa0cc735e2: Status 404 returned error can't find the container with id 49ebaca64020f4b8336f8237663838c1d10fffe946f536cb10a180fa0cc735e2 Jan 23 08:37:36 crc kubenswrapper[4711]: E0123 08:37:36.925316 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-6wgwv" podUID="a1ec006d-63fd-4fac-b5f5-df222bab8638" Jan 23 08:37:36 crc kubenswrapper[4711]: E0123 08:37:36.927133 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:b57d65d2a968705b9067192a7cb33bd4a12489db87e1d05de78c076f2062cab4,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nhrcd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5d8f59fb49-42kx2_openstack-operators(b764e900-9f5f-49e6-b6a8-8ad55007cc54): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 23 08:37:36 crc kubenswrapper[4711]: E0123 08:37:36.928277 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-42kx2" podUID="b764e900-9f5f-49e6-b6a8-8ad55007cc54" Jan 23 08:37:36 crc kubenswrapper[4711]: I0123 08:37:36.932976 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5d8f59fb49-42kx2"] Jan 23 08:37:37 crc kubenswrapper[4711]: I0123 08:37:37.004117 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-8ptr2" event={"ID":"effbd1c1-9a1a-4a5c-9955-6a1005746383","Type":"ContainerStarted","Data":"7c02d5ef0a2fd4571a5c660dbaae466a09d2b45fe4111feee2dbce0dc1769579"} Jan 23 08:37:37 crc kubenswrapper[4711]: I0123 08:37:37.005417 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-sk67x" event={"ID":"7c54a5ef-3d58-4010-875b-8b6022692c7e","Type":"ContainerStarted","Data":"6cb54b8018aeca32466ee46ff9c53fec90e87a2a2fa7c6b1dee23e2b37a8013a"} Jan 23 08:37:37 crc kubenswrapper[4711]: I0123 08:37:37.006452 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-cmn6r" event={"ID":"f1b9f385-b045-407d-a56c-87750c1c5972","Type":"ContainerStarted","Data":"4036da4371006d4e47f5fd3c6b8c6c2725ceb3adda7188dc56ec96833d7ef336"} Jan 23 08:37:37 crc kubenswrapper[4711]: I0123 08:37:37.007448 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-plrlz" event={"ID":"0b81979e-a44a-40d2-8eff-958e528d95a1","Type":"ContainerStarted","Data":"a58c6c62997b4437ac82e879718a858238bc8fb80d3ca616abf01662ac771301"} Jan 23 08:37:37 crc kubenswrapper[4711]: I0123 08:37:37.008588 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-6wgwv" event={"ID":"a1ec006d-63fd-4fac-b5f5-df222bab8638","Type":"ContainerStarted","Data":"c29dae702919cc11f3980e257bb5d40a0832c559244bd513f1a343118a475d88"} Jan 23 08:37:37 crc kubenswrapper[4711]: E0123 08:37:37.009713 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/test-operator@sha256:c8dde42dafd41026ed2e4cfc26efc0fff63c4ba9d31326ae7dc644ccceaafa9d\\\"\"" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-6wgwv" podUID="a1ec006d-63fd-4fac-b5f5-df222bab8638" Jan 23 08:37:37 crc kubenswrapper[4711]: I0123 08:37:37.010471 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-qnzz6" event={"ID":"793a9ab1-c243-4a56-9463-3be8147bff44","Type":"ContainerStarted","Data":"ab47b3b11e52a42753ea41c629ec4cb736636b2a024e611bf9ff8326119f5c9b"} Jan 23 08:37:37 crc kubenswrapper[4711]: E0123 08:37:37.012872 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:e02722d7581bfe1c5fc13e2fa6811d8665102ba86635c77547abf6b933cde127\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-qnzz6" podUID="793a9ab1-c243-4a56-9463-3be8147bff44" Jan 23 08:37:37 crc kubenswrapper[4711]: I0123 08:37:37.013484 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-ctqxt" event={"ID":"9eb50a74-06d4-4b43-a0b9-245354b3cde7","Type":"ContainerStarted","Data":"2e16d20e146e27e754c95e53493e9f0b7cddaeb997f14c511bd74d4840c5fbc9"} Jan 23 08:37:37 crc kubenswrapper[4711]: I0123 08:37:37.015324 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-42kx2" event={"ID":"b764e900-9f5f-49e6-b6a8-8ad55007cc54","Type":"ContainerStarted","Data":"49ebaca64020f4b8336f8237663838c1d10fffe946f536cb10a180fa0cc735e2"} Jan 23 08:37:37 crc kubenswrapper[4711]: I0123 08:37:37.016271 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-webhook-certs\") pod \"openstack-operator-controller-manager-6998f5c585-t278t\" (UID: \"72674af2-3b9e-47e6-8417-bee428fe826a\") " pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:37:37 crc kubenswrapper[4711]: I0123 08:37:37.016389 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-metrics-certs\") pod \"openstack-operator-controller-manager-6998f5c585-t278t\" (UID: \"72674af2-3b9e-47e6-8417-bee428fe826a\") " pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:37:37 crc kubenswrapper[4711]: I0123 08:37:37.016402 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-lvcpb" event={"ID":"458cb03b-3e35-4219-8009-08829d99da25","Type":"ContainerStarted","Data":"5a702f01e770fe1afc8a40c49bdf1e65d49fec17368a1f66815a3e07cae13393"} Jan 23 08:37:37 crc kubenswrapper[4711]: E0123 08:37:37.016432 4711 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 23 08:37:37 crc kubenswrapper[4711]: E0123 08:37:37.016483 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-webhook-certs podName:72674af2-3b9e-47e6-8417-bee428fe826a nodeName:}" failed. 
No retries permitted until 2026-01-23 08:37:39.016465098 +0000 UTC m=+1044.589421546 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-webhook-certs") pod "openstack-operator-controller-manager-6998f5c585-t278t" (UID: "72674af2-3b9e-47e6-8417-bee428fe826a") : secret "webhook-server-cert" not found Jan 23 08:37:37 crc kubenswrapper[4711]: E0123 08:37:37.016566 4711 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 23 08:37:37 crc kubenswrapper[4711]: E0123 08:37:37.016624 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-metrics-certs podName:72674af2-3b9e-47e6-8417-bee428fe826a nodeName:}" failed. No retries permitted until 2026-01-23 08:37:39.016608172 +0000 UTC m=+1044.589564540 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-metrics-certs") pod "openstack-operator-controller-manager-6998f5c585-t278t" (UID: "72674af2-3b9e-47e6-8417-bee428fe826a") : secret "metrics-server-cert" not found Jan 23 08:37:37 crc kubenswrapper[4711]: I0123 08:37:37.017386 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-4lhtg" event={"ID":"366f8b54-0e7e-4d75-9c62-d174624512e4","Type":"ContainerStarted","Data":"350a5d69544ca8dfa47b7ad19100d853b9a282279839ab514e277ada3f9a1a42"} Jan 23 08:37:37 crc kubenswrapper[4711]: E0123 08:37:37.018136 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:b57d65d2a968705b9067192a7cb33bd4a12489db87e1d05de78c076f2062cab4\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-42kx2" podUID="b764e900-9f5f-49e6-b6a8-8ad55007cc54" Jan 23 08:37:37 crc kubenswrapper[4711]: E0123 08:37:37.018935 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922\\\"\"" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-4lhtg" podUID="366f8b54-0e7e-4d75-9c62-d174624512e4" Jan 23 08:37:37 crc kubenswrapper[4711]: I0123 08:37:37.019613 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hwttv" event={"ID":"5726b969-f5cb-4e58-9e8f-92c001f4a7be","Type":"ContainerStarted","Data":"a5ee41ee86fd8cd2b1ac9df08050f527e29ac583d4e4249379cceabb757814f5"} Jan 23 08:37:37 crc kubenswrapper[4711]: I0123 08:37:37.020939 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-kjjf2" event={"ID":"0c4bb18f-fc6b-49ea-a9c3-971c666a935b","Type":"ContainerStarted","Data":"74d078bd609ce690124c029ba861ac263d1085f0f955c551d0eba5d4fd3e9e78"} Jan 23 08:37:37 crc kubenswrapper[4711]: E0123 08:37:37.020977 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hwttv" podUID="5726b969-f5cb-4e58-9e8f-92c001f4a7be" Jan 23 08:37:37 crc kubenswrapper[4711]: I0123 08:37:37.025233 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-v7tnr" event={"ID":"cb3cde58-59aa-41dc-a4f1-8fadd07dd1ed","Type":"ContainerStarted","Data":"b09f4636508a8687ab0777f63154e419a272843c55067896e232069cf5bffb6a"} Jan 23 08:37:37 crc kubenswrapper[4711]: I0123 08:37:37.028851 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-ghlgc" event={"ID":"5ad48aae-ab84-4c93-9d0e-cb4fd24884dd","Type":"ContainerStarted","Data":"2bdc1e653036996bfbc6772962fda4775366806a05bfb82abfc86fd71d2ad2f8"} Jan 23 08:37:37 crc kubenswrapper[4711]: I0123 08:37:37.031450 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-9fsww" event={"ID":"f57c2bcd-cd26-420c-a3f9-64b5d4d1a916","Type":"ContainerStarted","Data":"b04200afa53f7df13ef8b629edbe5f11cd05357e36632be61d902092b3ba55b0"} Jan 23 08:37:37 crc kubenswrapper[4711]: I0123 08:37:37.033848 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-62nhb" event={"ID":"e609d803-23cf-4d04-8587-bdd492c4c4bd","Type":"ContainerStarted","Data":"67412a9dceb6aa7d60562f0e2145c1e95437c48f80353ba3b2bde7d902e7c775"} Jan 23 08:37:37 crc kubenswrapper[4711]: I0123 08:37:37.040585 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p" event={"ID":"20db23eb-eb12-458a-9c9e-164f8e3bcab7","Type":"ContainerStarted","Data":"06284ce3aff2c83d71a53b02cdda58e4a8409b1df983de179026498e3174c33f"} Jan 23 08:37:37 crc kubenswrapper[4711]: I0123 08:37:37.048156 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bjrrb" event={"ID":"bfc822a0-472c-4e41-99c9-35605ebea5c6","Type":"ContainerStarted","Data":"72aecb39297a7c702947bd7cbcca907fc717690c0e8e9bd5199a0f1bc9a9f973"} Jan 23 08:37:37 crc kubenswrapper[4711]: E0123 08:37:37.051120 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:9caae9b3ee328df678baa26454e45e47693acdadb27f9c635680597aaec43337\\\"\"" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-62nhb" podUID="e609d803-23cf-4d04-8587-bdd492c4c4bd" Jan 23 08:37:38 crc kubenswrapper[4711]: E0123 08:37:38.097718 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922\\\"\"" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-4lhtg" podUID="366f8b54-0e7e-4d75-9c62-d174624512e4" Jan 23 08:37:38 crc kubenswrapper[4711]: E0123 08:37:38.098566 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:e02722d7581bfe1c5fc13e2fa6811d8665102ba86635c77547abf6b933cde127\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-qnzz6" podUID="793a9ab1-c243-4a56-9463-3be8147bff44" Jan 23 08:37:38 crc kubenswrapper[4711]: E0123 08:37:38.098621 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:c8dde42dafd41026ed2e4cfc26efc0fff63c4ba9d31326ae7dc644ccceaafa9d\\\"\"" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-6wgwv" podUID="a1ec006d-63fd-4fac-b5f5-df222bab8638" Jan 23 08:37:38 crc kubenswrapper[4711]: E0123 08:37:38.117406 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:b57d65d2a968705b9067192a7cb33bd4a12489db87e1d05de78c076f2062cab4\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-42kx2" podUID="b764e900-9f5f-49e6-b6a8-8ad55007cc54" Jan 23 08:37:38 crc kubenswrapper[4711]: E0123 08:37:38.117492 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:9caae9b3ee328df678baa26454e45e47693acdadb27f9c635680597aaec43337\\\"\"" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-62nhb" podUID="e609d803-23cf-4d04-8587-bdd492c4c4bd" Jan 23 08:37:38 crc kubenswrapper[4711]: E0123 08:37:38.117565 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hwttv" podUID="5726b969-f5cb-4e58-9e8f-92c001f4a7be" Jan 23 08:37:38 crc kubenswrapper[4711]: I0123 08:37:38.437859 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/467b6d38-a02c-44f9-81bf-3bda90dc4efd-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-vs5n6\" (UID: \"467b6d38-a02c-44f9-81bf-3bda90dc4efd\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6" Jan 23 08:37:38 crc kubenswrapper[4711]: E0123 08:37:38.438187 4711 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 23 08:37:38 crc kubenswrapper[4711]: E0123 08:37:38.438255 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/467b6d38-a02c-44f9-81bf-3bda90dc4efd-cert podName:467b6d38-a02c-44f9-81bf-3bda90dc4efd nodeName:}" failed. No retries permitted until 2026-01-23 08:37:42.438237523 +0000 UTC m=+1048.011193891 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/467b6d38-a02c-44f9-81bf-3bda90dc4efd-cert") pod "infra-operator-controller-manager-54ccf4f85d-vs5n6" (UID: "467b6d38-a02c-44f9-81bf-3bda90dc4efd") : secret "infra-operator-webhook-server-cert" not found Jan 23 08:37:38 crc kubenswrapper[4711]: I0123 08:37:38.640388 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e275a9a3-3a29-498a-bea9-b545730a0301-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854q774f\" (UID: \"e275a9a3-3a29-498a-bea9-b545730a0301\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f" Jan 23 08:37:38 crc kubenswrapper[4711]: E0123 08:37:38.640618 4711 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 23 08:37:38 crc kubenswrapper[4711]: E0123 08:37:38.640694 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e275a9a3-3a29-498a-bea9-b545730a0301-cert podName:e275a9a3-3a29-498a-bea9-b545730a0301 nodeName:}" failed. No retries permitted until 2026-01-23 08:37:42.640676066 +0000 UTC m=+1048.213632434 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e275a9a3-3a29-498a-bea9-b545730a0301-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854q774f" (UID: "e275a9a3-3a29-498a-bea9-b545730a0301") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 23 08:37:39 crc kubenswrapper[4711]: I0123 08:37:39.046004 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-metrics-certs\") pod \"openstack-operator-controller-manager-6998f5c585-t278t\" (UID: \"72674af2-3b9e-47e6-8417-bee428fe826a\") " pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:37:39 crc kubenswrapper[4711]: I0123 08:37:39.046115 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-webhook-certs\") pod \"openstack-operator-controller-manager-6998f5c585-t278t\" (UID: \"72674af2-3b9e-47e6-8417-bee428fe826a\") " pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:37:39 crc kubenswrapper[4711]: E0123 08:37:39.046269 4711 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 23 08:37:39 crc kubenswrapper[4711]: E0123 08:37:39.046291 4711 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 23 08:37:39 crc kubenswrapper[4711]: E0123 08:37:39.046338 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-webhook-certs podName:72674af2-3b9e-47e6-8417-bee428fe826a nodeName:}" failed. No retries permitted until 2026-01-23 08:37:43.04632309 +0000 UTC m=+1048.619279458 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-webhook-certs") pod "openstack-operator-controller-manager-6998f5c585-t278t" (UID: "72674af2-3b9e-47e6-8417-bee428fe826a") : secret "webhook-server-cert" not found Jan 23 08:37:39 crc kubenswrapper[4711]: E0123 08:37:39.046353 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-metrics-certs podName:72674af2-3b9e-47e6-8417-bee428fe826a nodeName:}" failed. No retries permitted until 2026-01-23 08:37:43.046347731 +0000 UTC m=+1048.619304099 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-metrics-certs") pod "openstack-operator-controller-manager-6998f5c585-t278t" (UID: "72674af2-3b9e-47e6-8417-bee428fe826a") : secret "metrics-server-cert" not found Jan 23 08:37:42 crc kubenswrapper[4711]: I0123 08:37:42.500447 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/467b6d38-a02c-44f9-81bf-3bda90dc4efd-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-vs5n6\" (UID: \"467b6d38-a02c-44f9-81bf-3bda90dc4efd\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6" Jan 23 08:37:42 crc kubenswrapper[4711]: E0123 08:37:42.501815 4711 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 23 08:37:42 crc kubenswrapper[4711]: E0123 08:37:42.501856 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/467b6d38-a02c-44f9-81bf-3bda90dc4efd-cert podName:467b6d38-a02c-44f9-81bf-3bda90dc4efd nodeName:}" failed. No retries permitted until 2026-01-23 08:37:50.501843222 +0000 UTC m=+1056.074799590 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/467b6d38-a02c-44f9-81bf-3bda90dc4efd-cert") pod "infra-operator-controller-manager-54ccf4f85d-vs5n6" (UID: "467b6d38-a02c-44f9-81bf-3bda90dc4efd") : secret "infra-operator-webhook-server-cert" not found Jan 23 08:37:42 crc kubenswrapper[4711]: I0123 08:37:42.703864 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e275a9a3-3a29-498a-bea9-b545730a0301-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854q774f\" (UID: \"e275a9a3-3a29-498a-bea9-b545730a0301\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f" Jan 23 08:37:42 crc kubenswrapper[4711]: E0123 08:37:42.704026 4711 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 23 08:37:42 crc kubenswrapper[4711]: E0123 08:37:42.704081 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e275a9a3-3a29-498a-bea9-b545730a0301-cert podName:e275a9a3-3a29-498a-bea9-b545730a0301 nodeName:}" failed. No retries permitted until 2026-01-23 08:37:50.704064309 +0000 UTC m=+1056.277020677 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e275a9a3-3a29-498a-bea9-b545730a0301-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854q774f" (UID: "e275a9a3-3a29-498a-bea9-b545730a0301") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 23 08:37:43 crc kubenswrapper[4711]: I0123 08:37:43.109105 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-metrics-certs\") pod \"openstack-operator-controller-manager-6998f5c585-t278t\" (UID: \"72674af2-3b9e-47e6-8417-bee428fe826a\") " pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:37:43 crc kubenswrapper[4711]: I0123 08:37:43.109265 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-webhook-certs\") pod \"openstack-operator-controller-manager-6998f5c585-t278t\" (UID: \"72674af2-3b9e-47e6-8417-bee428fe826a\") " pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:37:43 crc kubenswrapper[4711]: E0123 08:37:43.109374 4711 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 23 08:37:43 crc kubenswrapper[4711]: E0123 08:37:43.109456 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-webhook-certs podName:72674af2-3b9e-47e6-8417-bee428fe826a nodeName:}" failed. No retries permitted until 2026-01-23 08:37:51.109439327 +0000 UTC m=+1056.682395685 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-webhook-certs") pod "openstack-operator-controller-manager-6998f5c585-t278t" (UID: "72674af2-3b9e-47e6-8417-bee428fe826a") : secret "webhook-server-cert" not found Jan 23 08:37:43 crc kubenswrapper[4711]: E0123 08:37:43.109773 4711 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 23 08:37:43 crc kubenswrapper[4711]: E0123 08:37:43.109821 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-metrics-certs podName:72674af2-3b9e-47e6-8417-bee428fe826a nodeName:}" failed. No retries permitted until 2026-01-23 08:37:51.109810196 +0000 UTC m=+1056.682766624 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-metrics-certs") pod "openstack-operator-controller-manager-6998f5c585-t278t" (UID: "72674af2-3b9e-47e6-8417-bee428fe826a") : secret "metrics-server-cert" not found Jan 23 08:37:50 crc kubenswrapper[4711]: I0123 08:37:50.530395 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/467b6d38-a02c-44f9-81bf-3bda90dc4efd-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-vs5n6\" (UID: \"467b6d38-a02c-44f9-81bf-3bda90dc4efd\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6" Jan 23 08:37:50 crc kubenswrapper[4711]: I0123 08:37:50.536458 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/467b6d38-a02c-44f9-81bf-3bda90dc4efd-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-vs5n6\" (UID: \"467b6d38-a02c-44f9-81bf-3bda90dc4efd\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6" Jan 23 08:37:50 crc kubenswrapper[4711]: I0123 08:37:50.734123 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e275a9a3-3a29-498a-bea9-b545730a0301-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854q774f\" (UID: \"e275a9a3-3a29-498a-bea9-b545730a0301\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f" Jan 23 08:37:50 crc kubenswrapper[4711]: E0123 08:37:50.734314 4711 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 23 08:37:50 crc kubenswrapper[4711]: E0123 08:37:50.734408 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e275a9a3-3a29-498a-bea9-b545730a0301-cert podName:e275a9a3-3a29-498a-bea9-b545730a0301 nodeName:}" failed. No retries permitted until 2026-01-23 08:38:06.734379187 +0000 UTC m=+1072.307335565 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e275a9a3-3a29-498a-bea9-b545730a0301-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854q774f" (UID: "e275a9a3-3a29-498a-bea9-b545730a0301") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 23 08:37:50 crc kubenswrapper[4711]: I0123 08:37:50.834210 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6" Jan 23 08:37:51 crc kubenswrapper[4711]: I0123 08:37:51.139166 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-metrics-certs\") pod \"openstack-operator-controller-manager-6998f5c585-t278t\" (UID: \"72674af2-3b9e-47e6-8417-bee428fe826a\") " pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:37:51 crc kubenswrapper[4711]: I0123 08:37:51.139312 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-webhook-certs\") pod \"openstack-operator-controller-manager-6998f5c585-t278t\" (UID: \"72674af2-3b9e-47e6-8417-bee428fe826a\") " pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:37:51 crc kubenswrapper[4711]: E0123 08:37:51.139404 4711 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 23 08:37:51 crc kubenswrapper[4711]: E0123 08:37:51.139548 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-metrics-certs podName:72674af2-3b9e-47e6-8417-bee428fe826a nodeName:}" failed. No retries permitted until 2026-01-23 08:38:07.139499808 +0000 UTC m=+1072.712456176 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-metrics-certs") pod "openstack-operator-controller-manager-6998f5c585-t278t" (UID: "72674af2-3b9e-47e6-8417-bee428fe826a") : secret "metrics-server-cert" not found Jan 23 08:37:51 crc kubenswrapper[4711]: E0123 08:37:51.139555 4711 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 23 08:37:51 crc kubenswrapper[4711]: E0123 08:37:51.139646 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-webhook-certs podName:72674af2-3b9e-47e6-8417-bee428fe826a nodeName:}" failed. No retries permitted until 2026-01-23 08:38:07.139624521 +0000 UTC m=+1072.712580889 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-webhook-certs") pod "openstack-operator-controller-manager-6998f5c585-t278t" (UID: "72674af2-3b9e-47e6-8417-bee428fe826a") : secret "webhook-server-cert" not found Jan 23 08:37:55 crc kubenswrapper[4711]: E0123 08:37:55.047731 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:8b3bfb9e86618b7ac69443939b0968fae28a22cd62ea1e429b599ff9f8a5f8cf" Jan 23 08:37:55 crc kubenswrapper[4711]: E0123 08:37:55.048174 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:8b3bfb9e86618b7ac69443939b0968fae28a22cd62ea1e429b599ff9f8a5f8cf,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bn5r8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-55db956ddc-plrlz_openstack-operators(0b81979e-a44a-40d2-8eff-958e528d95a1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 23 08:37:55 crc kubenswrapper[4711]: E0123 08:37:55.049399 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-plrlz" 
podUID="0b81979e-a44a-40d2-8eff-958e528d95a1" Jan 23 08:37:55 crc kubenswrapper[4711]: E0123 08:37:55.287560 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:8b3bfb9e86618b7ac69443939b0968fae28a22cd62ea1e429b599ff9f8a5f8cf\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-plrlz" podUID="0b81979e-a44a-40d2-8eff-958e528d95a1" Jan 23 08:37:55 crc kubenswrapper[4711]: I0123 08:37:55.994058 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:37:55 crc kubenswrapper[4711]: I0123 08:37:55.994445 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:37:55 crc kubenswrapper[4711]: I0123 08:37:55.994531 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:37:55 crc kubenswrapper[4711]: I0123 08:37:55.995355 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ba3caaa2b687a97a0322bba7ecb4eece08eed4af73c49fa085ce275a1fc9329c"} pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 23 08:37:55 crc kubenswrapper[4711]: I0123 08:37:55.995450 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" containerID="cri-o://ba3caaa2b687a97a0322bba7ecb4eece08eed4af73c49fa085ce275a1fc9329c" gracePeriod=600 Jan 23 08:37:57 crc kubenswrapper[4711]: I0123 08:37:57.236481 4711 generic.go:334] "Generic (PLEG): container finished" podID="3846d4e0-cfda-4e0b-8747-85267de12736" containerID="ba3caaa2b687a97a0322bba7ecb4eece08eed4af73c49fa085ce275a1fc9329c" exitCode=0 Jan 23 08:37:57 crc kubenswrapper[4711]: I0123 08:37:57.236550 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerDied","Data":"ba3caaa2b687a97a0322bba7ecb4eece08eed4af73c49fa085ce275a1fc9329c"} Jan 23 08:37:57 crc kubenswrapper[4711]: I0123 08:37:57.236581 4711 scope.go:117] "RemoveContainer" containerID="09dfb01b93dac17d4e6980fd3e7ea0054118ce3392de559f35289e1fef65f8f9" Jan 23 08:37:57 crc kubenswrapper[4711]: E0123 08:37:57.514033 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:ff0b6c27e2d96afccd73fbbb5b5297a3f60c7f4f1dfd2a877152466697018d71" Jan 23 08:37:57 crc kubenswrapper[4711]: E0123 08:37:57.514216 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:ff0b6c27e2d96afccd73fbbb5b5297a3f60c7f4f1dfd2a877152466697018d71,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-d4dc9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-c87fff755-bjrrb_openstack-operators(bfc822a0-472c-4e41-99c9-35605ebea5c6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 23 08:37:57 crc kubenswrapper[4711]: E0123 08:37:57.515564 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bjrrb" podUID="bfc822a0-472c-4e41-99c9-35605ebea5c6" Jan 23 08:37:58 crc kubenswrapper[4711]: E0123 08:37:58.245896 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:ff0b6c27e2d96afccd73fbbb5b5297a3f60c7f4f1dfd2a877152466697018d71\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bjrrb" podUID="bfc822a0-472c-4e41-99c9-35605ebea5c6" Jan 23 08:37:58 crc kubenswrapper[4711]: E0123 08:37:58.284719 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/openstack-k8s-operators/horizon-operator@sha256:3311e627bcb860d9443592a2c67078417318c9eb77d8ef4d07f9aa7027d46822" Jan 23 08:37:58 crc kubenswrapper[4711]: E0123 08:37:58.284895 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:3311e627bcb860d9443592a2c67078417318c9eb77d8ef4d07f9aa7027d46822,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-76czs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-77d5c5b54f-v7tnr_openstack-operators(cb3cde58-59aa-41dc-a4f1-8fadd07dd1ed): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 23 08:37:58 crc kubenswrapper[4711]: E0123 08:37:58.286049 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-v7tnr" podUID="cb3cde58-59aa-41dc-a4f1-8fadd07dd1ed" Jan 23 08:37:59 crc kubenswrapper[4711]: E0123 08:37:59.251683 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:3311e627bcb860d9443592a2c67078417318c9eb77d8ef4d07f9aa7027d46822\\\"\"" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-v7tnr" 
podUID="cb3cde58-59aa-41dc-a4f1-8fadd07dd1ed" Jan 23 08:37:59 crc kubenswrapper[4711]: E0123 08:37:59.328117 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:8e340ff11922b38e811261de96982e1aff5f4eb8f225d1d9f5973025a4fe8349" Jan 23 08:37:59 crc kubenswrapper[4711]: E0123 08:37:59.328323 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:8e340ff11922b38e811261de96982e1aff5f4eb8f225d1d9f5973025a4fe8349,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-j5ls4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-b8b6d4659-9fsww_openstack-operators(f57c2bcd-cd26-420c-a3f9-64b5d4d1a916): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 23 08:37:59 crc kubenswrapper[4711]: E0123 08:37:59.329556 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-9fsww" podUID="f57c2bcd-cd26-420c-a3f9-64b5d4d1a916" Jan 23 08:38:00 crc kubenswrapper[4711]: E0123 08:38:00.076055 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/openstack-k8s-operators/designate-operator@sha256:6c88312afa9673f7b72c558368034d7a488ead73080cdcdf581fe85b99263ece" Jan 23 08:38:00 crc kubenswrapper[4711]: E0123 08:38:00.076597 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:6c88312afa9673f7b72c558368034d7a488ead73080cdcdf581fe85b99263ece,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nzxbd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-b45d7bf98-4pw4h_openstack-operators(c6c6b995-fa92-4cf2-87a1-361881e8c284): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 23 08:38:00 crc kubenswrapper[4711]: E0123 08:38:00.077786 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-4pw4h" podUID="c6c6b995-fa92-4cf2-87a1-361881e8c284" Jan 23 08:38:00 crc kubenswrapper[4711]: E0123 08:38:00.259645 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:8e340ff11922b38e811261de96982e1aff5f4eb8f225d1d9f5973025a4fe8349\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-9fsww" 
podUID="f57c2bcd-cd26-420c-a3f9-64b5d4d1a916" Jan 23 08:38:00 crc kubenswrapper[4711]: E0123 08:38:00.259684 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/designate-operator@sha256:6c88312afa9673f7b72c558368034d7a488ead73080cdcdf581fe85b99263ece\\\"\"" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-4pw4h" podUID="c6c6b995-fa92-4cf2-87a1-361881e8c284" Jan 23 08:38:00 crc kubenswrapper[4711]: E0123 08:38:00.748953 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:a8fc8f9d445b1232f446119015b226008b07c6a259f5bebc1fcbb39ec310afe5" Jan 23 08:38:00 crc kubenswrapper[4711]: E0123 08:38:00.749196 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:a8fc8f9d445b1232f446119015b226008b07c6a259f5bebc1fcbb39ec310afe5,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5bh27,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-7bd9774b6-cmn6r_openstack-operators(f1b9f385-b045-407d-a56c-87750c1c5972): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 23 08:38:00 crc kubenswrapper[4711]: E0123 08:38:00.750440 4711 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-cmn6r" podUID="f1b9f385-b045-407d-a56c-87750c1c5972" Jan 23 08:38:01 crc kubenswrapper[4711]: E0123 08:38:01.239740 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:2f9a2f064448faebbae58f52d564dc0e8e39bed0fc12bd6b9fe925e42f1b5492" Jan 23 08:38:01 crc kubenswrapper[4711]: E0123 08:38:01.239971 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:2f9a2f064448faebbae58f52d564dc0e8e39bed0fc12bd6b9fe925e42f1b5492,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kspv2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-594c8c9d5d-sk67x_openstack-operators(7c54a5ef-3d58-4010-875b-8b6022692c7e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 23 08:38:01 crc kubenswrapper[4711]: E0123 08:38:01.241208 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-sk67x" 
podUID="7c54a5ef-3d58-4010-875b-8b6022692c7e" Jan 23 08:38:01 crc kubenswrapper[4711]: E0123 08:38:01.266667 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:a8fc8f9d445b1232f446119015b226008b07c6a259f5bebc1fcbb39ec310afe5\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-cmn6r" podUID="f1b9f385-b045-407d-a56c-87750c1c5972" Jan 23 08:38:01 crc kubenswrapper[4711]: E0123 08:38:01.266784 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:2f9a2f064448faebbae58f52d564dc0e8e39bed0fc12bd6b9fe925e42f1b5492\\\"\"" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-sk67x" podUID="7c54a5ef-3d58-4010-875b-8b6022692c7e" Jan 23 08:38:01 crc kubenswrapper[4711]: E0123 08:38:01.317022 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.129.56.147:5001/openstack-k8s-operators/nova-operator:a7a5f7314f205e3432275992a0e63e9a097fd014" Jan 23 08:38:01 crc kubenswrapper[4711]: E0123 08:38:01.317096 4711 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.129.56.147:5001/openstack-k8s-operators/nova-operator:a7a5f7314f205e3432275992a0e63e9a097fd014" Jan 23 08:38:01 crc kubenswrapper[4711]: E0123 08:38:01.317291 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.129.56.147:5001/openstack-k8s-operators/nova-operator:a7a5f7314f205e3432275992a0e63e9a097fd014,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lpzpp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-5db5449586-d5x8p_openstack-operators(20db23eb-eb12-458a-9c9e-164f8e3bcab7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 23 08:38:01 crc kubenswrapper[4711]: E0123 08:38:01.318579 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p" podUID="20db23eb-eb12-458a-9c9e-164f8e3bcab7" Jan 23 08:38:02 crc kubenswrapper[4711]: E0123 08:38:02.272489 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.147:5001/openstack-k8s-operators/nova-operator:a7a5f7314f205e3432275992a0e63e9a097fd014\\\"\"" pod="openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p" podUID="20db23eb-eb12-458a-9c9e-164f8e3bcab7" Jan 23 08:38:06 crc kubenswrapper[4711]: I0123 08:38:06.794069 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e275a9a3-3a29-498a-bea9-b545730a0301-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854q774f\" (UID: \"e275a9a3-3a29-498a-bea9-b545730a0301\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f" Jan 23 08:38:06 crc kubenswrapper[4711]: I0123 08:38:06.806678 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e275a9a3-3a29-498a-bea9-b545730a0301-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854q774f\" (UID: \"e275a9a3-3a29-498a-bea9-b545730a0301\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f" Jan 23 08:38:06 crc kubenswrapper[4711]: I0123 08:38:06.844329 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f" Jan 23 08:38:07 crc kubenswrapper[4711]: I0123 08:38:07.199646 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-webhook-certs\") pod \"openstack-operator-controller-manager-6998f5c585-t278t\" (UID: \"72674af2-3b9e-47e6-8417-bee428fe826a\") " pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:38:07 crc kubenswrapper[4711]: I0123 08:38:07.200148 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-metrics-certs\") pod \"openstack-operator-controller-manager-6998f5c585-t278t\" (UID: \"72674af2-3b9e-47e6-8417-bee428fe826a\") " pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:38:07 crc kubenswrapper[4711]: I0123 08:38:07.205636 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-webhook-certs\") pod \"openstack-operator-controller-manager-6998f5c585-t278t\" (UID: \"72674af2-3b9e-47e6-8417-bee428fe826a\") " pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:38:07 crc kubenswrapper[4711]: I0123 08:38:07.211552 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72674af2-3b9e-47e6-8417-bee428fe826a-metrics-certs\") pod \"openstack-operator-controller-manager-6998f5c585-t278t\" (UID: \"72674af2-3b9e-47e6-8417-bee428fe826a\") " pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:38:07 crc kubenswrapper[4711]: I0123 08:38:07.339347 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.123235 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6"] Jan 23 08:38:08 crc kubenswrapper[4711]: W0123 08:38:08.134887 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod467b6d38_a02c_44f9_81bf_3bda90dc4efd.slice/crio-91c3b5903d8b27ba89a8f246952d1e46a664565dbb1dec495067f7172007061e WatchSource:0}: Error finding container 91c3b5903d8b27ba89a8f246952d1e46a664565dbb1dec495067f7172007061e: Status 404 returned error can't find the container with id 91c3b5903d8b27ba89a8f246952d1e46a664565dbb1dec495067f7172007061e Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.213058 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t"] Jan 23 08:38:08 crc kubenswrapper[4711]: W0123 08:38:08.223874 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72674af2_3b9e_47e6_8417_bee428fe826a.slice/crio-4a855f06592682a9aa38cb9d9b3a629396da69a5e31fbe4b173095367349d783 WatchSource:0}: Error finding container 4a855f06592682a9aa38cb9d9b3a629396da69a5e31fbe4b173095367349d783: Status 404 returned error can't find the container with id 4a855f06592682a9aa38cb9d9b3a629396da69a5e31fbe4b173095367349d783 Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.259466 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f"] Jan 23 08:38:08 crc kubenswrapper[4711]: W0123 08:38:08.292456 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode275a9a3_3a29_498a_bea9_b545730a0301.slice/crio-043f026348e6333bf762c272de55b6982820ac4cb71b0ae6f288246e40b5c68e WatchSource:0}: Error finding container 043f026348e6333bf762c272de55b6982820ac4cb71b0ae6f288246e40b5c68e: Status 404 returned error can't find the container with id 043f026348e6333bf762c272de55b6982820ac4cb71b0ae6f288246e40b5c68e Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.344107 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-gw4s9" event={"ID":"a7d6e419-04ce-4f5c-93a9-34d14a8c531a","Type":"ContainerStarted","Data":"09feada431ea4367ca1eb234ad99e080ea30e4c4b4b7e4441d1ca2fab9df538b"} Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.344927 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-gw4s9" Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.351950 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-6wgwv" event={"ID":"a1ec006d-63fd-4fac-b5f5-df222bab8638","Type":"ContainerStarted","Data":"9614eaf5eb309541338421a3aec63434001ff7335faf7a7e79499117fc399524"} Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.352236 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-6wgwv" Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.368756 4711 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-lvcpb" event={"ID":"458cb03b-3e35-4219-8009-08829d99da25","Type":"ContainerStarted","Data":"262a4b3bccc2f3fe342a953599d33ed40f6d2caacfe35721574ddcfbdb5e0474"} Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.369015 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-lvcpb" Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.377305 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-kjjf2" event={"ID":"0c4bb18f-fc6b-49ea-a9c3-971c666a935b","Type":"ContainerStarted","Data":"eb11b7a678e5bc3788c7c0b37def0dc64a86b6a3903b1049121f9140bc1a8f2e"} Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.377912 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-kjjf2" Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.380901 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6" event={"ID":"467b6d38-a02c-44f9-81bf-3bda90dc4efd","Type":"ContainerStarted","Data":"91c3b5903d8b27ba89a8f246952d1e46a664565dbb1dec495067f7172007061e"} Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.385532 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-gw4s9" podStartSLOduration=8.08089878 podStartE2EDuration="34.38549955s" podCreationTimestamp="2026-01-23 08:37:34 +0000 UTC" firstStartedPulling="2026-01-23 08:37:35.415208211 +0000 UTC m=+1040.988164579" lastFinishedPulling="2026-01-23 08:38:01.719808981 +0000 UTC m=+1067.292765349" observedRunningTime="2026-01-23 08:38:08.378189271 +0000 UTC m=+1073.951145639" watchObservedRunningTime="2026-01-23 08:38:08.38549955 +0000 UTC m=+1073.958455918" Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.389767 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-ctqxt" event={"ID":"9eb50a74-06d4-4b43-a0b9-245354b3cde7","Type":"ContainerStarted","Data":"f6382d959d8dc04b7c687edaff2839ee972c0acb39c866814c540bc466fd4b8a"} Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.390437 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-ctqxt" Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.399767 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-62nhb" event={"ID":"e609d803-23cf-4d04-8587-bdd492c4c4bd","Type":"ContainerStarted","Data":"9b71042d0dbfdef9ca54581a23071733006c503e547cc06c3fd3ed8b1dea8f60"} Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.400409 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-62nhb" Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.401381 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" event={"ID":"72674af2-3b9e-47e6-8417-bee428fe826a","Type":"ContainerStarted","Data":"4a855f06592682a9aa38cb9d9b3a629396da69a5e31fbe4b173095367349d783"} Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.414191 
4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-4lhtg" event={"ID":"366f8b54-0e7e-4d75-9c62-d174624512e4","Type":"ContainerStarted","Data":"f93d6eabe0cf9b8659243d396f31be68ebf3edfd7d53642f8207fc2d4e752cc0"} Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.414943 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-4lhtg" Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.417744 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-kjjf2" podStartSLOduration=8.381626959 podStartE2EDuration="34.417726549s" podCreationTimestamp="2026-01-23 08:37:34 +0000 UTC" firstStartedPulling="2026-01-23 08:37:36.677885364 +0000 UTC m=+1042.250841732" lastFinishedPulling="2026-01-23 08:38:02.713984954 +0000 UTC m=+1068.286941322" observedRunningTime="2026-01-23 08:38:08.412966372 +0000 UTC m=+1073.985922730" watchObservedRunningTime="2026-01-23 08:38:08.417726549 +0000 UTC m=+1073.990682917" Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.431329 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f" event={"ID":"e275a9a3-3a29-498a-bea9-b545730a0301","Type":"ContainerStarted","Data":"043f026348e6333bf762c272de55b6982820ac4cb71b0ae6f288246e40b5c68e"} Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.444171 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-lvcpb" podStartSLOduration=7.803900495 podStartE2EDuration="34.444153705s" podCreationTimestamp="2026-01-23 08:37:34 +0000 UTC" firstStartedPulling="2026-01-23 08:37:36.887698388 +0000 UTC m=+1042.460654756" lastFinishedPulling="2026-01-23 08:38:03.527951588 +0000 UTC m=+1069.100907966" observedRunningTime="2026-01-23 08:38:08.439762898 +0000 UTC m=+1074.012719266" watchObservedRunningTime="2026-01-23 08:38:08.444153705 +0000 UTC m=+1074.017110073" Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.459790 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerStarted","Data":"9ef6df8407452842a81bff9ff371dec1b0be0a97894fe9cf1da32e295f2f3558"} Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.465762 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-8ptr2" event={"ID":"effbd1c1-9a1a-4a5c-9955-6a1005746383","Type":"ContainerStarted","Data":"b4d179130443766b554dc052896e4b26d49599cd3d564e50f3abec6f58cba736"} Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.466549 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-8ptr2" Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.472575 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-qnzz6" event={"ID":"793a9ab1-c243-4a56-9463-3be8147bff44","Type":"ContainerStarted","Data":"cbc726a4ee100d405b8bfd5dd11efaaf92fc977ed7a218e6e982368b26d3e073"} Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.472777 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-qnzz6" Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.479002 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-ghlgc" event={"ID":"5ad48aae-ab84-4c93-9d0e-cb4fd24884dd","Type":"ContainerStarted","Data":"78697dbc4c65ea7448abe859b5aa69944dd404b1a711fdc7729a2333d05b93ab"} Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.479751 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-ghlgc" Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.484822 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-42kx2" event={"ID":"b764e900-9f5f-49e6-b6a8-8ad55007cc54","Type":"ContainerStarted","Data":"d86730c2a00a6d3a5b5a1e688d0d4bffd9e7643aaceec04975c52534300c97fd"} Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.485027 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-42kx2" Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.527472 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-6wgwv" podStartSLOduration=3.831727704 podStartE2EDuration="34.527428513s" podCreationTimestamp="2026-01-23 08:37:34 +0000 UTC" firstStartedPulling="2026-01-23 08:37:36.92293872 +0000 UTC m=+1042.495895088" lastFinishedPulling="2026-01-23 08:38:07.618639529 +0000 UTC m=+1073.191595897" observedRunningTime="2026-01-23 08:38:08.497396278 +0000 UTC m=+1074.070352646" watchObservedRunningTime="2026-01-23 08:38:08.527428513 +0000 UTC m=+1074.100384891" Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.646173 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-qnzz6" podStartSLOduration=3.929325311 podStartE2EDuration="34.646158708s" podCreationTimestamp="2026-01-23 08:37:34 +0000 UTC" firstStartedPulling="2026-01-23 08:37:36.922918389 +0000 UTC m=+1042.495874757" lastFinishedPulling="2026-01-23 08:38:07.639751786 +0000 UTC m=+1073.212708154" observedRunningTime="2026-01-23 08:38:08.644987869 +0000 UTC m=+1074.217944237" watchObservedRunningTime="2026-01-23 08:38:08.646158708 +0000 UTC m=+1074.219115076" Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.779940 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-62nhb" podStartSLOduration=4.079186818 podStartE2EDuration="34.77992312s" podCreationTimestamp="2026-01-23 08:37:34 +0000 UTC" firstStartedPulling="2026-01-23 08:37:36.918090582 +0000 UTC m=+1042.491046950" lastFinishedPulling="2026-01-23 08:38:07.618826874 +0000 UTC m=+1073.191783252" observedRunningTime="2026-01-23 08:38:08.777665396 +0000 UTC m=+1074.350621754" watchObservedRunningTime="2026-01-23 08:38:08.77992312 +0000 UTC m=+1074.352879488" Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.826049 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-4lhtg" podStartSLOduration=4.108609028 podStartE2EDuration="34.826024388s" podCreationTimestamp="2026-01-23 08:37:34 +0000 UTC" firstStartedPulling="2026-01-23 
08:37:36.907571964 +0000 UTC m=+1042.480528332" lastFinishedPulling="2026-01-23 08:38:07.624987324 +0000 UTC m=+1073.197943692" observedRunningTime="2026-01-23 08:38:08.814880395 +0000 UTC m=+1074.387836763" watchObservedRunningTime="2026-01-23 08:38:08.826024388 +0000 UTC m=+1074.398980756" Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.927476 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-8ptr2" podStartSLOduration=8.711900478 podStartE2EDuration="34.927454539s" podCreationTimestamp="2026-01-23 08:37:34 +0000 UTC" firstStartedPulling="2026-01-23 08:37:36.497370477 +0000 UTC m=+1042.070326845" lastFinishedPulling="2026-01-23 08:38:02.712924548 +0000 UTC m=+1068.285880906" observedRunningTime="2026-01-23 08:38:08.866799306 +0000 UTC m=+1074.439755674" watchObservedRunningTime="2026-01-23 08:38:08.927454539 +0000 UTC m=+1074.500410907" Jan 23 08:38:08 crc kubenswrapper[4711]: I0123 08:38:08.929957 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-ctqxt" podStartSLOduration=8.270850169 podStartE2EDuration="34.92994349s" podCreationTimestamp="2026-01-23 08:37:34 +0000 UTC" firstStartedPulling="2026-01-23 08:37:36.86897509 +0000 UTC m=+1042.441931468" lastFinishedPulling="2026-01-23 08:38:03.528068421 +0000 UTC m=+1069.101024789" observedRunningTime="2026-01-23 08:38:08.925725447 +0000 UTC m=+1074.498681815" watchObservedRunningTime="2026-01-23 08:38:08.92994349 +0000 UTC m=+1074.502899858" Jan 23 08:38:09 crc kubenswrapper[4711]: I0123 08:38:09.013088 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-ghlgc" podStartSLOduration=7.494175408 podStartE2EDuration="35.013070725s" podCreationTimestamp="2026-01-23 08:37:34 +0000 UTC" firstStartedPulling="2026-01-23 08:37:36.528194202 +0000 UTC m=+1042.101150570" lastFinishedPulling="2026-01-23 08:38:04.047089529 +0000 UTC m=+1069.620045887" observedRunningTime="2026-01-23 08:38:08.996036037 +0000 UTC m=+1074.568992405" watchObservedRunningTime="2026-01-23 08:38:09.013070725 +0000 UTC m=+1074.586027093" Jan 23 08:38:09 crc kubenswrapper[4711]: I0123 08:38:09.031069 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-42kx2" podStartSLOduration=4.333683534 podStartE2EDuration="35.031053484s" podCreationTimestamp="2026-01-23 08:37:34 +0000 UTC" firstStartedPulling="2026-01-23 08:37:36.926984229 +0000 UTC m=+1042.499940597" lastFinishedPulling="2026-01-23 08:38:07.624354179 +0000 UTC m=+1073.197310547" observedRunningTime="2026-01-23 08:38:09.029544077 +0000 UTC m=+1074.602500445" watchObservedRunningTime="2026-01-23 08:38:09.031053484 +0000 UTC m=+1074.604009852" Jan 23 08:38:09 crc kubenswrapper[4711]: I0123 08:38:09.497806 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" event={"ID":"72674af2-3b9e-47e6-8417-bee428fe826a","Type":"ContainerStarted","Data":"0ef2cc8b3be00ef716c5063dcf5f69e1a9f19d1c0be8928e674b2ab496e67cfc"} Jan 23 08:38:09 crc kubenswrapper[4711]: I0123 08:38:09.499124 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:38:09 crc kubenswrapper[4711]: I0123 
08:38:09.500926 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hwttv" event={"ID":"5726b969-f5cb-4e58-9e8f-92c001f4a7be","Type":"ContainerStarted","Data":"ae1a90629b6d10e6ef434a2d9bd03c463bbe6d6cee41e36bf77b2583013c9770"} Jan 23 08:38:09 crc kubenswrapper[4711]: I0123 08:38:09.550233 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" podStartSLOduration=34.550213586 podStartE2EDuration="34.550213586s" podCreationTimestamp="2026-01-23 08:37:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:38:09.536042909 +0000 UTC m=+1075.108999287" watchObservedRunningTime="2026-01-23 08:38:09.550213586 +0000 UTC m=+1075.123169954" Jan 23 08:38:09 crc kubenswrapper[4711]: I0123 08:38:09.571601 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-hwttv" podStartSLOduration=3.459562175 podStartE2EDuration="34.571579339s" podCreationTimestamp="2026-01-23 08:37:35 +0000 UTC" firstStartedPulling="2026-01-23 08:37:36.88817312 +0000 UTC m=+1042.461129488" lastFinishedPulling="2026-01-23 08:38:08.000190264 +0000 UTC m=+1073.573146652" observedRunningTime="2026-01-23 08:38:09.56632884 +0000 UTC m=+1075.139285208" watchObservedRunningTime="2026-01-23 08:38:09.571579339 +0000 UTC m=+1075.144535707" Jan 23 08:38:14 crc kubenswrapper[4711]: I0123 08:38:14.638496 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-gw4s9" Jan 23 08:38:14 crc kubenswrapper[4711]: I0123 08:38:14.825961 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-ghlgc" Jan 23 08:38:14 crc kubenswrapper[4711]: I0123 08:38:14.956919 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-8ptr2" Jan 23 08:38:15 crc kubenswrapper[4711]: I0123 08:38:15.047259 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-42kx2" Jan 23 08:38:15 crc kubenswrapper[4711]: I0123 08:38:15.098624 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-62nhb" Jan 23 08:38:15 crc kubenswrapper[4711]: I0123 08:38:15.362165 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-4lhtg" Jan 23 08:38:15 crc kubenswrapper[4711]: I0123 08:38:15.400370 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-kjjf2" Jan 23 08:38:15 crc kubenswrapper[4711]: I0123 08:38:15.401450 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-ctqxt" Jan 23 08:38:15 crc kubenswrapper[4711]: I0123 08:38:15.455225 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-6wgwv" Jan 23 08:38:15 crc kubenswrapper[4711]: I0123 
08:38:15.457983 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-qnzz6" Jan 23 08:38:15 crc kubenswrapper[4711]: I0123 08:38:15.497093 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-lvcpb" Jan 23 08:38:17 crc kubenswrapper[4711]: I0123 08:38:17.348612 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-6998f5c585-t278t" Jan 23 08:38:21 crc kubenswrapper[4711]: E0123 08:38:21.937465 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = writing blob: storing blob to file \"/var/tmp/container_images_storage677260795/1\": happened during read: context canceled" image="quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:dae767a3ae652ffc70ba60c5bf2b5bf72c12d939353053e231b258948ededb22" Jan 23 08:38:21 crc kubenswrapper[4711]: E0123 08:38:21.956848 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:dae767a3ae652ffc70ba60c5bf2b5bf72c12d939353053e231b258948ededb22,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEI
LOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter:v0.15.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLOUDKITTY_API_IMAGE_URL_DEFAULT,Value:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLOUDKITTY_PROC_IMAGE_URL_DEFAULT,Value:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-processor:current,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler:release-0.7.12,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-dhcp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RE
LATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter:v1.5.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter:v1.10.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-neutron-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_
DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-health-manager:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-housekeeping:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IM
AGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-object:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine:current-podified,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-scl8c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-6b68b8b854q774f_openstack-operators(e275a9a3-3a29-498a-bea9-b545730a0301): ErrImagePull: rpc error: code = 
Canceled desc = writing blob: storing blob to file \"/var/tmp/container_images_storage677260795/1\": happened during read: context canceled" logger="UnhandledError" Jan 23 08:38:21 crc kubenswrapper[4711]: E0123 08:38:21.959036 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = writing blob: storing blob to file \\\"/var/tmp/container_images_storage677260795/1\\\": happened during read: context canceled\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f" podUID="e275a9a3-3a29-498a-bea9-b545730a0301" Jan 23 08:38:22 crc kubenswrapper[4711]: E0123 08:38:22.594824 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:dae767a3ae652ffc70ba60c5bf2b5bf72c12d939353053e231b258948ededb22\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f" podUID="e275a9a3-3a29-498a-bea9-b545730a0301" Jan 23 08:38:23 crc kubenswrapper[4711]: E0123 08:38:23.838638 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/infra-operator@sha256:2eac1b9dadaddf4734f35e3dd1996dca960e97d2f304cbd48254b900a840a84a" Jan 23 08:38:23 crc kubenswrapper[4711]: E0123 08:38:23.838819 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:2eac1b9dadaddf4734f35e3dd1996dca960e97d2f304cbd48254b900a840a84a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f4sbk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-54ccf4f85d-vs5n6_openstack-operators(467b6d38-a02c-44f9-81bf-3bda90dc4efd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 23 08:38:23 crc kubenswrapper[4711]: E0123 08:38:23.840019 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6" podUID="467b6d38-a02c-44f9-81bf-3bda90dc4efd" Jan 23 08:38:24 crc kubenswrapper[4711]: E0123 08:38:24.843830 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:2eac1b9dadaddf4734f35e3dd1996dca960e97d2f304cbd48254b900a840a84a\\\"\"" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6" podUID="467b6d38-a02c-44f9-81bf-3bda90dc4efd" Jan 23 08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.615780 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-plrlz" event={"ID":"0b81979e-a44a-40d2-8eff-958e528d95a1","Type":"ContainerStarted","Data":"cf69711868edf273b85a26d00201429c11fef078cc99f2112894fb643b85e681"} Jan 23 08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.617140 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-plrlz" Jan 23 08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.618735 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-4pw4h" event={"ID":"c6c6b995-fa92-4cf2-87a1-361881e8c284","Type":"ContainerStarted","Data":"0a727400bf9f4382817dd9f21c1917e4e27eac2e28beff2e5db0c2273a650209"} Jan 23 08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.619284 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-4pw4h" Jan 23 08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.620916 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p" event={"ID":"20db23eb-eb12-458a-9c9e-164f8e3bcab7","Type":"ContainerStarted","Data":"6b8be6370443e6621f6fbe1adcf9da2e3f2da110ecfd02ba01c2c7d20bbc75a8"} Jan 23 08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.621337 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p" Jan 23 
08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.622235 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bjrrb" event={"ID":"bfc822a0-472c-4e41-99c9-35605ebea5c6","Type":"ContainerStarted","Data":"37c309fa36a3c7d908cb037ed16070ae7c6ae3a4e95e10fbe9d8a1dad84ff2a4"} Jan 23 08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.622441 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bjrrb" Jan 23 08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.623646 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-v7tnr" event={"ID":"cb3cde58-59aa-41dc-a4f1-8fadd07dd1ed","Type":"ContainerStarted","Data":"89bb6d9740e302364b5592d2fc781512026f6afab90bf19801919eeae9c26418"} Jan 23 08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.623844 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-v7tnr" Jan 23 08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.624950 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-9fsww" event={"ID":"f57c2bcd-cd26-420c-a3f9-64b5d4d1a916","Type":"ContainerStarted","Data":"417b4b1da5902c42358958da6e2cef5e69f5a54484f1c1548051a7fcce0750af"} Jan 23 08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.625399 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-9fsww" Jan 23 08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.630739 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-sk67x" event={"ID":"7c54a5ef-3d58-4010-875b-8b6022692c7e","Type":"ContainerStarted","Data":"ed2472bf8aafc4885a108ce324874a11be80df22972c9be6b67cd0c01f6310f6"} Jan 23 08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.630983 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-sk67x" Jan 23 08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.631930 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-cmn6r" event={"ID":"f1b9f385-b045-407d-a56c-87750c1c5972","Type":"ContainerStarted","Data":"2ae3d5078e183a8d0167817263006fc362351dadbefce6854c53474862e241b9"} Jan 23 08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.632134 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-cmn6r" Jan 23 08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.639079 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-plrlz" podStartSLOduration=3.108884209 podStartE2EDuration="51.639059709s" podCreationTimestamp="2026-01-23 08:37:34 +0000 UTC" firstStartedPulling="2026-01-23 08:37:36.506728067 +0000 UTC m=+1042.079684435" lastFinishedPulling="2026-01-23 08:38:25.036903527 +0000 UTC m=+1090.609859935" observedRunningTime="2026-01-23 08:38:25.634253502 +0000 UTC m=+1091.207209870" watchObservedRunningTime="2026-01-23 08:38:25.639059709 +0000 UTC m=+1091.212016087" Jan 23 08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.688586 4711 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p" podStartSLOduration=3.487192495 podStartE2EDuration="51.688563591s" podCreationTimestamp="2026-01-23 08:37:34 +0000 UTC" firstStartedPulling="2026-01-23 08:37:36.887948034 +0000 UTC m=+1042.460904392" lastFinishedPulling="2026-01-23 08:38:25.08931912 +0000 UTC m=+1090.662275488" observedRunningTime="2026-01-23 08:38:25.670804106 +0000 UTC m=+1091.243760474" watchObservedRunningTime="2026-01-23 08:38:25.688563591 +0000 UTC m=+1091.261519959" Jan 23 08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.733376 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-sk67x" podStartSLOduration=3.141804854 podStartE2EDuration="51.733353965s" podCreationTimestamp="2026-01-23 08:37:34 +0000 UTC" firstStartedPulling="2026-01-23 08:37:36.5424079 +0000 UTC m=+1042.115364268" lastFinishedPulling="2026-01-23 08:38:25.133957011 +0000 UTC m=+1090.706913379" observedRunningTime="2026-01-23 08:38:25.728463286 +0000 UTC m=+1091.301419664" watchObservedRunningTime="2026-01-23 08:38:25.733353965 +0000 UTC m=+1091.306310333" Jan 23 08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.878497 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-v7tnr" podStartSLOduration=3.34153436 podStartE2EDuration="51.878480046s" podCreationTimestamp="2026-01-23 08:37:34 +0000 UTC" firstStartedPulling="2026-01-23 08:37:36.547460283 +0000 UTC m=+1042.120416651" lastFinishedPulling="2026-01-23 08:38:25.084405969 +0000 UTC m=+1090.657362337" observedRunningTime="2026-01-23 08:38:25.87777593 +0000 UTC m=+1091.450732298" watchObservedRunningTime="2026-01-23 08:38:25.878480046 +0000 UTC m=+1091.451436414" Jan 23 08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.879813 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-9fsww" podStartSLOduration=3.284229819 podStartE2EDuration="51.879804489s" podCreationTimestamp="2026-01-23 08:37:34 +0000 UTC" firstStartedPulling="2026-01-23 08:37:36.49009118 +0000 UTC m=+1042.063047548" lastFinishedPulling="2026-01-23 08:38:25.08566583 +0000 UTC m=+1090.658622218" observedRunningTime="2026-01-23 08:38:25.799003922 +0000 UTC m=+1091.371960290" watchObservedRunningTime="2026-01-23 08:38:25.879804489 +0000 UTC m=+1091.452760857" Jan 23 08:38:25 crc kubenswrapper[4711]: I0123 08:38:25.946275 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bjrrb" podStartSLOduration=3.33578203 podStartE2EDuration="51.946257805s" podCreationTimestamp="2026-01-23 08:37:34 +0000 UTC" firstStartedPulling="2026-01-23 08:37:36.474288313 +0000 UTC m=+1042.047244681" lastFinishedPulling="2026-01-23 08:38:25.084764088 +0000 UTC m=+1090.657720456" observedRunningTime="2026-01-23 08:38:25.942878592 +0000 UTC m=+1091.515834960" watchObservedRunningTime="2026-01-23 08:38:25.946257805 +0000 UTC m=+1091.519214173" Jan 23 08:38:26 crc kubenswrapper[4711]: I0123 08:38:26.002049 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-4pw4h" podStartSLOduration=2.628947756 podStartE2EDuration="52.002033499s" podCreationTimestamp="2026-01-23 08:37:34 +0000 UTC" firstStartedPulling="2026-01-23 
08:37:35.710931447 +0000 UTC m=+1041.283887815" lastFinishedPulling="2026-01-23 08:38:25.08401718 +0000 UTC m=+1090.656973558" observedRunningTime="2026-01-23 08:38:25.997876447 +0000 UTC m=+1091.570832815" watchObservedRunningTime="2026-01-23 08:38:26.002033499 +0000 UTC m=+1091.574989867" Jan 23 08:38:26 crc kubenswrapper[4711]: I0123 08:38:26.026161 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-cmn6r" podStartSLOduration=3.485007882 podStartE2EDuration="52.02614191s" podCreationTimestamp="2026-01-23 08:37:34 +0000 UTC" firstStartedPulling="2026-01-23 08:37:36.547757621 +0000 UTC m=+1042.120713989" lastFinishedPulling="2026-01-23 08:38:25.088891639 +0000 UTC m=+1090.661848017" observedRunningTime="2026-01-23 08:38:26.022262294 +0000 UTC m=+1091.595218662" watchObservedRunningTime="2026-01-23 08:38:26.02614191 +0000 UTC m=+1091.599098278" Jan 23 08:38:34 crc kubenswrapper[4711]: I0123 08:38:34.717270 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-4pw4h" Jan 23 08:38:34 crc kubenswrapper[4711]: I0123 08:38:34.900587 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-9fsww" Jan 23 08:38:35 crc kubenswrapper[4711]: I0123 08:38:35.030138 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-plrlz" Jan 23 08:38:35 crc kubenswrapper[4711]: I0123 08:38:35.038608 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-bjrrb" Jan 23 08:38:35 crc kubenswrapper[4711]: I0123 08:38:35.077417 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-sk67x" Jan 23 08:38:35 crc kubenswrapper[4711]: I0123 08:38:35.118815 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-v7tnr" Jan 23 08:38:35 crc kubenswrapper[4711]: I0123 08:38:35.121228 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-cmn6r" Jan 23 08:38:35 crc kubenswrapper[4711]: I0123 08:38:35.277675 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p" Jan 23 08:38:38 crc kubenswrapper[4711]: I0123 08:38:38.742141 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6" event={"ID":"467b6d38-a02c-44f9-81bf-3bda90dc4efd","Type":"ContainerStarted","Data":"681fa8d1e3429a0601ac27a7423977a983eaca42c45cf56e87f83a24960c3814"} Jan 23 08:38:38 crc kubenswrapper[4711]: I0123 08:38:38.742919 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6" Jan 23 08:38:38 crc kubenswrapper[4711]: I0123 08:38:38.765191 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6" podStartSLOduration=34.698663502 podStartE2EDuration="1m4.765171506s" podCreationTimestamp="2026-01-23 08:37:34 +0000 
UTC" firstStartedPulling="2026-01-23 08:38:08.148147854 +0000 UTC m=+1073.721104232" lastFinishedPulling="2026-01-23 08:38:38.214655868 +0000 UTC m=+1103.787612236" observedRunningTime="2026-01-23 08:38:38.760118922 +0000 UTC m=+1104.333075280" watchObservedRunningTime="2026-01-23 08:38:38.765171506 +0000 UTC m=+1104.338127874" Jan 23 08:38:39 crc kubenswrapper[4711]: I0123 08:38:39.766213 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f" event={"ID":"e275a9a3-3a29-498a-bea9-b545730a0301","Type":"ContainerStarted","Data":"618d16610715b618a77ea96e3fe3a0242538d884019e8a4a20c2c2ac88fa8bab"} Jan 23 08:38:39 crc kubenswrapper[4711]: I0123 08:38:39.767029 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f" Jan 23 08:38:46 crc kubenswrapper[4711]: I0123 08:38:46.850627 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f" Jan 23 08:38:46 crc kubenswrapper[4711]: I0123 08:38:46.877430 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854q774f" podStartSLOduration=42.136524939 podStartE2EDuration="1m12.877409497s" podCreationTimestamp="2026-01-23 08:37:34 +0000 UTC" firstStartedPulling="2026-01-23 08:38:08.294380251 +0000 UTC m=+1073.867336609" lastFinishedPulling="2026-01-23 08:38:39.035264789 +0000 UTC m=+1104.608221167" observedRunningTime="2026-01-23 08:38:39.806022932 +0000 UTC m=+1105.378979300" watchObservedRunningTime="2026-01-23 08:38:46.877409497 +0000 UTC m=+1112.450365855" Jan 23 08:38:50 crc kubenswrapper[4711]: I0123 08:38:50.839918 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-vs5n6" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.598148 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/rabbitmq-server-0"] Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.599773 4711 util.go:30] "No sandbox for pod can be found. 
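
The "Observed pod startup duration" records above expose the relation between the tracker's two figures: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration appears to be that same interval with the image-pull window (firstStartedPulling to lastFinishedPulling, taken on the monotonic m=+ clock) subtracted, which is why a pod that waited roughly 48 s on pulls reports an SLO duration of only about 3 s. A minimal Go check of that inference against the ovn-operator-controller-manager-55db956ddc-plrlz record above (constants copied from the log; the subtraction rule itself is inferred from these records, not quoted from kubelet source):

package main

import "fmt"

func main() {
	// Values from the ovn-operator-controller-manager-55db956ddc-plrlz record above.
	const (
		e2e          = 51.639059709   // podStartE2EDuration, seconds
		firstPulling = 1042.079684435 // firstStartedPulling, monotonic m=+ seconds
		lastPulling  = 1090.609859935 // lastFinishedPulling, monotonic m=+ seconds
	)
	// Subtract the image-pull window from the end-to-end duration.
	slo := e2e - (lastPulling - firstPulling)
	fmt.Printf("derived podStartSLOduration: %.9f s\n", slo) // prints 3.108884209, matching the log
}
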
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.602978 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-erlang-cookie"
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.602999 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"openshift-service-ca.crt"
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.604331 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"rabbitmq-plugins-conf"
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.604843 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-default-user"
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.606981 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"rabbitmq-server-conf"
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.607025 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"kube-root-ca.crt"
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.607068 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-server-dockercfg-zztzs"
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.613542 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/rabbitmq-server-0"]
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.626356 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f970c1db-48d5-4b49-afc1-eee7e1289da9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.626421 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f970c1db-48d5-4b49-afc1-eee7e1289da9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.626443 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f970c1db-48d5-4b49-afc1-eee7e1289da9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.626588 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-50b0df63-411d-4c10-96b3-1a416610e967\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-50b0df63-411d-4c10-96b3-1a416610e967\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.626616 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f970c1db-48d5-4b49-afc1-eee7e1289da9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.626652 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f970c1db-48d5-4b49-afc1-eee7e1289da9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0"
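
Volume handling for rabbitmq-server-0 proceeds through the same three reconciler stages that repeat for every pod below: VerifyControllerAttachedVolume (the desired-state check above), MountVolume started, and finally MountVolume.SetUp succeeded. A stand-alone Go sketch, not kubelet code, that reads a log like this one on stdin and reports the furthest stage each named volume reached:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"strings"
)

func main() {
	// The volume name is the quoted token after "for volume"; the quote may be
	// backslash-escaped in the raw log text.
	re := regexp.MustCompile(`for volume \\?"([^"\\]+)`)
	rank := map[string]int{"attached": 1, "mounting": 2, "mounted": 3}
	phase := map[string]string{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 1<<20), 1<<20) // log lines here run to several KB
	for sc.Scan() {
		line := sc.Text()
		var p string
		switch {
		case strings.Contains(line, "MountVolume.SetUp succeeded"):
			p = "mounted"
		case strings.Contains(line, "operationExecutor.MountVolume started"):
			p = "mounting"
		case strings.Contains(line, "VerifyControllerAttachedVolume started"):
			p = "attached"
		default:
			continue
		}
		// Keep only the furthest stage observed per volume name.
		if m := re.FindStringSubmatch(line); m != nil && rank[p] > rank[phase[m[1]]] {
			phase[m[1]] = p
		}
	}
	for v, p := range phase {
		fmt.Printf("%-60s %s\n", v, p)
	}
}
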
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.626702 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f970c1db-48d5-4b49-afc1-eee7e1289da9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.626722 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f970c1db-48d5-4b49-afc1-eee7e1289da9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.626759 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flrbn\" (UniqueName: \"kubernetes.io/projected/f970c1db-48d5-4b49-afc1-eee7e1289da9-kube-api-access-flrbn\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.728416 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f970c1db-48d5-4b49-afc1-eee7e1289da9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.728464 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f970c1db-48d5-4b49-afc1-eee7e1289da9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.728482 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f970c1db-48d5-4b49-afc1-eee7e1289da9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.728528 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flrbn\" (UniqueName: \"kubernetes.io/projected/f970c1db-48d5-4b49-afc1-eee7e1289da9-kube-api-access-flrbn\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.728579 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f970c1db-48d5-4b49-afc1-eee7e1289da9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.728597 4711 reconciler_common.go:218]
"operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f970c1db-48d5-4b49-afc1-eee7e1289da9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.728610 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f970c1db-48d5-4b49-afc1-eee7e1289da9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.728653 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-50b0df63-411d-4c10-96b3-1a416610e967\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-50b0df63-411d-4c10-96b3-1a416610e967\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.728669 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f970c1db-48d5-4b49-afc1-eee7e1289da9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.728937 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f970c1db-48d5-4b49-afc1-eee7e1289da9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.729075 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f970c1db-48d5-4b49-afc1-eee7e1289da9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.852199 4711 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.852249 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-50b0df63-411d-4c10-96b3-1a416610e967\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-50b0df63-411d-4c10-96b3-1a416610e967\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2c52ba18edf172c3c86ff633cfa4a5c8d43cf208f66a5e33ba940d1f84a388e4/globalmount\"" pod="nova-kuttl-default/rabbitmq-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.852385 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f970c1db-48d5-4b49-afc1-eee7e1289da9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.856765 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f970c1db-48d5-4b49-afc1-eee7e1289da9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.857334 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f970c1db-48d5-4b49-afc1-eee7e1289da9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.863417 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f970c1db-48d5-4b49-afc1-eee7e1289da9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.877181 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/rabbitmq-broadcaster-server-0"] Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.878742 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f970c1db-48d5-4b49-afc1-eee7e1289da9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.883772 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.887110 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-broadcaster-erlang-cookie" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.888618 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-broadcaster-default-user" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.889414 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"rabbitmq-broadcaster-server-conf" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.889674 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"rabbitmq-broadcaster-plugins-conf" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.889917 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-broadcaster-server-dockercfg-f7hhx" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.917139 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flrbn\" (UniqueName: \"kubernetes.io/projected/f970c1db-48d5-4b49-afc1-eee7e1289da9-kube-api-access-flrbn\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.919403 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/rabbitmq-broadcaster-server-0"] Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.928048 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-50b0df63-411d-4c10-96b3-1a416610e967\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-50b0df63-411d-4c10-96b3-1a416610e967\") pod \"rabbitmq-server-0\" (UID: \"f970c1db-48d5-4b49-afc1-eee7e1289da9\") " pod="nova-kuttl-default/rabbitmq-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.931016 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-rabbitmq-erlang-cookie\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.931068 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-d5ba534f-86cf-4935-86e8-33e1fc37a703\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d5ba534f-86cf-4935-86e8-33e1fc37a703\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.931097 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcb5l\" (UniqueName: \"kubernetes.io/projected/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-kube-api-access-tcb5l\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.931410 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-rabbitmq-plugins\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.931481 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-erlang-cookie-secret\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.931614 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-server-conf\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.931654 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-plugins-conf\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.931686 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-pod-info\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:01 crc kubenswrapper[4711]: I0123 08:39:01.931712 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-rabbitmq-confd\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.032413 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-rabbitmq-plugins\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.032457 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-erlang-cookie-secret\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.032495 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-server-conf\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:02 crc 
kubenswrapper[4711]: I0123 08:39:02.032536 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-plugins-conf\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.032558 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-pod-info\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.032578 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-rabbitmq-confd\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.032602 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-rabbitmq-erlang-cookie\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.032628 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-d5ba534f-86cf-4935-86e8-33e1fc37a703\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d5ba534f-86cf-4935-86e8-33e1fc37a703\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.032651 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcb5l\" (UniqueName: \"kubernetes.io/projected/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-kube-api-access-tcb5l\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.032987 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-rabbitmq-plugins\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.034287 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-rabbitmq-erlang-cookie\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.034654 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-plugins-conf\") pod \"rabbitmq-broadcaster-server-0\" (UID: 
\"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.036578 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-erlang-cookie-secret\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.037322 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-rabbitmq-confd\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.037486 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-pod-info\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.038104 4711 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.038141 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-d5ba534f-86cf-4935-86e8-33e1fc37a703\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d5ba534f-86cf-4935-86e8-33e1fc37a703\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0134afd1895b2e385fc4a1e9094f63d241270348b876e691bd43322dcd7c388e/globalmount\"" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.058343 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcb5l\" (UniqueName: \"kubernetes.io/projected/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-kube-api-access-tcb5l\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.059318 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-d5ba534f-86cf-4935-86e8-33e1fc37a703\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d5ba534f-86cf-4935-86e8-33e1fc37a703\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.096843 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/rabbitmq-cell1-server-0"] Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.097914 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.099449 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-cell1-erlang-cookie" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.100257 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"rabbitmq-cell1-plugins-conf" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.101061 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"rabbitmq-cell1-server-conf" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.101779 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-cell1-default-user" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.101897 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-cell1-server-dockercfg-42tv5" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.111639 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/rabbitmq-cell1-server-0"] Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.218599 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/rabbitmq-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.226012 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf-server-conf\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.235761 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/00e53f46-c48c-4f2c-83aa-088781b82d46-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.235820 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-0f4b6886-6437-4520-97ba-a40837c10e65\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0f4b6886-6437-4520-97ba-a40837c10e65\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.235841 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/00e53f46-c48c-4f2c-83aa-088781b82d46-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.236123 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/00e53f46-c48c-4f2c-83aa-088781b82d46-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.236273 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/00e53f46-c48c-4f2c-83aa-088781b82d46-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.236322 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-md2h4\" (UniqueName: \"kubernetes.io/projected/00e53f46-c48c-4f2c-83aa-088781b82d46-kube-api-access-md2h4\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.236440 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/00e53f46-c48c-4f2c-83aa-088781b82d46-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.236554 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/00e53f46-c48c-4f2c-83aa-088781b82d46-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.236709 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/00e53f46-c48c-4f2c-83aa-088781b82d46-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.250383 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.337990 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/00e53f46-c48c-4f2c-83aa-088781b82d46-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.338060 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/00e53f46-c48c-4f2c-83aa-088781b82d46-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.338104 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/00e53f46-c48c-4f2c-83aa-088781b82d46-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.338124 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-md2h4\" (UniqueName: \"kubernetes.io/projected/00e53f46-c48c-4f2c-83aa-088781b82d46-kube-api-access-md2h4\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.338149 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/00e53f46-c48c-4f2c-83aa-088781b82d46-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.338181 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/00e53f46-c48c-4f2c-83aa-088781b82d46-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.338237 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/00e53f46-c48c-4f2c-83aa-088781b82d46-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.338281 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/00e53f46-c48c-4f2c-83aa-088781b82d46-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.338309 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-0f4b6886-6437-4520-97ba-a40837c10e65\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0f4b6886-6437-4520-97ba-a40837c10e65\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.339665 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/00e53f46-c48c-4f2c-83aa-088781b82d46-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.339755 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/00e53f46-c48c-4f2c-83aa-088781b82d46-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.340275 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/00e53f46-c48c-4f2c-83aa-088781b82d46-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.340386 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/00e53f46-c48c-4f2c-83aa-088781b82d46-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.343177 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/00e53f46-c48c-4f2c-83aa-088781b82d46-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.345169 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/00e53f46-c48c-4f2c-83aa-088781b82d46-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.353355 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/00e53f46-c48c-4f2c-83aa-088781b82d46-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.356126 4711 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
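
The csi_attacher line that closes the sequence above records a CSI capability probe: the kubevirt.io.hostpath-provisioner node plugin does not advertise STAGE_UNSTAGE_VOLUME, so the kubelet skips the NodeStageVolume RPC behind MountDevice, still marks MountDevice succeeded with the computed globalmount path (next entry), and leaves the actual mount to the per-pod SetUp (NodePublishVolume). A simplified sketch of that branch, an illustration of the CSI-spec behavior rather than kubelet's actual code:

package main

import "fmt"

// nodeCaps stands in for the capability set a driver reports via the CSI
// NodeGetCapabilities RPC; the field name here is illustrative.
type nodeCaps struct{ stageUnstage bool }

// mountDevice mirrors the decision logged above: without STAGE_UNSTAGE_VOLUME,
// device staging is skipped and publishing happens directly per pod.
func mountDevice(caps nodeCaps, volume string) {
	if !caps.stageUnstage {
		fmt.Printf("STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice for %s...\n", volume)
		return // per-pod SetUp (NodePublishVolume) will mount the volume directly
	}
	fmt.Printf("NodeStageVolume(%s): staging at the globalmount path first\n", volume)
}

func main() {
	mountDevice(nodeCaps{stageUnstage: false}, "pvc-0f4b6886-6437-4520-97ba-a40837c10e65")
}
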
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.356170 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-0f4b6886-6437-4520-97ba-a40837c10e65\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0f4b6886-6437-4520-97ba-a40837c10e65\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/aa10f6506c1a9cca3e4f878fc9f7e47179872598e302fbb6132b8b0c33f8303e/globalmount\"" pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.363190 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-md2h4\" (UniqueName: \"kubernetes.io/projected/00e53f46-c48c-4f2c-83aa-088781b82d46-kube-api-access-md2h4\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.453986 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/openstack-galera-0"] Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.455948 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/openstack-galera-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.462481 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"openstack-scripts" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.462945 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"galera-openstack-dockercfg-bnm79" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.463070 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"openstack-config-data" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.463128 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"cert-galera-openstack-svc" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.491944 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"combined-ca-bundle" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.591241 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-0f4b6886-6437-4520-97ba-a40837c10e65\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0f4b6886-6437-4520-97ba-a40837c10e65\") pod \"rabbitmq-cell1-server-0\" (UID: \"00e53f46-c48c-4f2c-83aa-088781b82d46\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.620188 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/openstack-galera-0"] Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.643124 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a21f5317-eee2-4f13-9df5-40c48bce5aaf-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.643189 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7kj4\" (UniqueName: \"kubernetes.io/projected/a21f5317-eee2-4f13-9df5-40c48bce5aaf-kube-api-access-h7kj4\") pod \"openstack-galera-0\" (UID: 
\"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.643219 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a21f5317-eee2-4f13-9df5-40c48bce5aaf-kolla-config\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.643260 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-df34a108-4419-401d-9310-6f577c6e7c0d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-df34a108-4419-401d-9310-6f577c6e7c0d\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.643279 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a21f5317-eee2-4f13-9df5-40c48bce5aaf-config-data-default\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.643300 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a21f5317-eee2-4f13-9df5-40c48bce5aaf-operator-scripts\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.643332 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a21f5317-eee2-4f13-9df5-40c48bce5aaf-config-data-generated\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.643417 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/a21f5317-eee2-4f13-9df5-40c48bce5aaf-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0" Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.729940 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/rabbitmq-cell1-server-0"
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.752279 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a21f5317-eee2-4f13-9df5-40c48bce5aaf-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.752336 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7kj4\" (UniqueName: \"kubernetes.io/projected/a21f5317-eee2-4f13-9df5-40c48bce5aaf-kube-api-access-h7kj4\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.752357 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a21f5317-eee2-4f13-9df5-40c48bce5aaf-kolla-config\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.752385 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-df34a108-4419-401d-9310-6f577c6e7c0d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-df34a108-4419-401d-9310-6f577c6e7c0d\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.752403 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a21f5317-eee2-4f13-9df5-40c48bce5aaf-config-data-default\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.752421 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a21f5317-eee2-4f13-9df5-40c48bce5aaf-operator-scripts\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.752445 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a21f5317-eee2-4f13-9df5-40c48bce5aaf-config-data-generated\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.752526 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/a21f5317-eee2-4f13-9df5-40c48bce5aaf-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.753489 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a21f5317-eee2-4f13-9df5-40c48bce5aaf-kolla-config\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.754281 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a21f5317-eee2-4f13-9df5-40c48bce5aaf-config-data-generated\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.755001 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a21f5317-eee2-4f13-9df5-40c48bce5aaf-config-data-default\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.755069 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a21f5317-eee2-4f13-9df5-40c48bce5aaf-operator-scripts\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.760711 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a21f5317-eee2-4f13-9df5-40c48bce5aaf-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.761583 4711 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.761611 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-df34a108-4419-401d-9310-6f577c6e7c0d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-df34a108-4419-401d-9310-6f577c6e7c0d\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/ba3489e4de026a15c774e62772d186df2ec0a3ff0ce0cc092823e546fa797360/globalmount\"" pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.761617 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/a21f5317-eee2-4f13-9df5-40c48bce5aaf-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.773672 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7kj4\" (UniqueName: \"kubernetes.io/projected/a21f5317-eee2-4f13-9df5-40c48bce5aaf-kube-api-access-h7kj4\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.811238 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-df34a108-4419-401d-9310-6f577c6e7c0d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-df34a108-4419-401d-9310-6f577c6e7c0d\") pod \"openstack-galera-0\" (UID: \"a21f5317-eee2-4f13-9df5-40c48bce5aaf\") " pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.950287 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/memcached-0"]
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.951408 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/memcached-0"
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.954095 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"memcached-memcached-dockercfg-fzh8c"
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.954166 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"memcached-config-data"
Jan 23 08:39:02 crc kubenswrapper[4711]: I0123 08:39:02.978739 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/memcached-0"]
Jan 23 08:39:03 crc kubenswrapper[4711]: I0123 08:39:03.056408 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xl828\" (UniqueName: \"kubernetes.io/projected/e899df0c-3fb3-4d7b-b376-0a907dbc82a0-kube-api-access-xl828\") pod \"memcached-0\" (UID: \"e899df0c-3fb3-4d7b-b376-0a907dbc82a0\") " pod="nova-kuttl-default/memcached-0"
Jan 23 08:39:03 crc kubenswrapper[4711]: I0123 08:39:03.056606 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e899df0c-3fb3-4d7b-b376-0a907dbc82a0-kolla-config\") pod \"memcached-0\" (UID: \"e899df0c-3fb3-4d7b-b376-0a907dbc82a0\") " pod="nova-kuttl-default/memcached-0"
Jan 23 08:39:03 crc kubenswrapper[4711]: I0123 08:39:03.056639 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e899df0c-3fb3-4d7b-b376-0a907dbc82a0-config-data\") pod \"memcached-0\" (UID: \"e899df0c-3fb3-4d7b-b376-0a907dbc82a0\") " pod="nova-kuttl-default/memcached-0"
Jan 23 08:39:03 crc kubenswrapper[4711]: I0123 08:39:03.075082 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:03 crc kubenswrapper[4711]: I0123 08:39:03.158279 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e899df0c-3fb3-4d7b-b376-0a907dbc82a0-kolla-config\") pod \"memcached-0\" (UID: \"e899df0c-3fb3-4d7b-b376-0a907dbc82a0\") " pod="nova-kuttl-default/memcached-0"
Jan 23 08:39:03 crc kubenswrapper[4711]: I0123 08:39:03.158317 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e899df0c-3fb3-4d7b-b376-0a907dbc82a0-config-data\") pod \"memcached-0\" (UID: \"e899df0c-3fb3-4d7b-b376-0a907dbc82a0\") " pod="nova-kuttl-default/memcached-0"
Jan 23 08:39:03 crc kubenswrapper[4711]: I0123 08:39:03.158352 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xl828\" (UniqueName: \"kubernetes.io/projected/e899df0c-3fb3-4d7b-b376-0a907dbc82a0-kube-api-access-xl828\") pod \"memcached-0\" (UID: \"e899df0c-3fb3-4d7b-b376-0a907dbc82a0\") " pod="nova-kuttl-default/memcached-0"
Jan 23 08:39:03 crc kubenswrapper[4711]: I0123 08:39:03.160967 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e899df0c-3fb3-4d7b-b376-0a907dbc82a0-kolla-config\") pod \"memcached-0\" (UID: \"e899df0c-3fb3-4d7b-b376-0a907dbc82a0\") " pod="nova-kuttl-default/memcached-0"
Jan 23 08:39:03 crc kubenswrapper[4711]: I0123 08:39:03.161446 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e899df0c-3fb3-4d7b-b376-0a907dbc82a0-config-data\") pod \"memcached-0\" (UID: \"e899df0c-3fb3-4d7b-b376-0a907dbc82a0\") " pod="nova-kuttl-default/memcached-0"
Jan 23 08:39:03 crc kubenswrapper[4711]: I0123 08:39:03.164611 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/rabbitmq-broadcaster-server-0"]
Jan 23 08:39:03 crc kubenswrapper[4711]: I0123 08:39:03.194633 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xl828\" (UniqueName: \"kubernetes.io/projected/e899df0c-3fb3-4d7b-b376-0a907dbc82a0-kube-api-access-xl828\") pod \"memcached-0\" (UID: \"e899df0c-3fb3-4d7b-b376-0a907dbc82a0\") " pod="nova-kuttl-default/memcached-0"
Jan 23 08:39:03 crc kubenswrapper[4711]: I0123 08:39:03.237398 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/rabbitmq-server-0"]
Jan 23 08:39:03 crc kubenswrapper[4711]: W0123 08:39:03.242413 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf970c1db_48d5_4b49_afc1_eee7e1289da9.slice/crio-511e324a5a254c6a25a937e65ddb04c7abf6108c7542ae8ae4e52f3973a030d7 WatchSource:0}: Error finding container 511e324a5a254c6a25a937e65ddb04c7abf6108c7542ae8ae4e52f3973a030d7: Status 404 returned error can't find the container with id 511e324a5a254c6a25a937e65ddb04c7abf6108c7542ae8ae4e52f3973a030d7
Jan 23 08:39:03 crc kubenswrapper[4711]: I0123 08:39:03.299060 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/memcached-0"
Jan 23 08:39:03 crc kubenswrapper[4711]: W0123 08:39:03.471983 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod00e53f46_c48c_4f2c_83aa_088781b82d46.slice/crio-969ebe0a57f42fb41e1010dcbe6abf5b1bac2018c28e864afd18e7b7262b2bc0 WatchSource:0}: Error finding container 969ebe0a57f42fb41e1010dcbe6abf5b1bac2018c28e864afd18e7b7262b2bc0: Status 404 returned error can't find the container with id 969ebe0a57f42fb41e1010dcbe6abf5b1bac2018c28e864afd18e7b7262b2bc0
Jan 23 08:39:03 crc kubenswrapper[4711]: I0123 08:39:03.485201 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/rabbitmq-cell1-server-0"]
Jan 23 08:39:03 crc kubenswrapper[4711]: W0123 08:39:03.756719 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda21f5317_eee2_4f13_9df5_40c48bce5aaf.slice/crio-6a0a50d0ae3b24b7b377a0cd7d4a03758c7437d07aeb6d283c02a8ba51dc27e9 WatchSource:0}: Error finding container 6a0a50d0ae3b24b7b377a0cd7d4a03758c7437d07aeb6d283c02a8ba51dc27e9: Status 404 returned error can't find the container with id 6a0a50d0ae3b24b7b377a0cd7d4a03758c7437d07aeb6d283c02a8ba51dc27e9
Jan 23 08:39:03 crc kubenswrapper[4711]: I0123 08:39:03.801724 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/openstack-galera-0"]
Jan 23 08:39:03 crc kubenswrapper[4711]: I0123 08:39:03.824292 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/memcached-0"]
Jan 23 08:39:03 crc kubenswrapper[4711]: I0123 08:39:03.941401 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-server-0" event={"ID":"f970c1db-48d5-4b49-afc1-eee7e1289da9","Type":"ContainerStarted","Data":"511e324a5a254c6a25a937e65ddb04c7abf6108c7542ae8ae4e52f3973a030d7"}
Jan 23 08:39:03 crc kubenswrapper[4711]: I0123 08:39:03.945486 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" event={"ID":"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf","Type":"ContainerStarted","Data":"96d252e95ea8fb826f17716d92f62d6f6154da973824405baa9c0580df73f3b4"}
Jan 23 08:39:03 crc kubenswrapper[4711]: I0123 08:39:03.946586 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-cell1-server-0" event={"ID":"00e53f46-c48c-4f2c-83aa-088781b82d46","Type":"ContainerStarted","Data":"969ebe0a57f42fb41e1010dcbe6abf5b1bac2018c28e864afd18e7b7262b2bc0"}
Jan 23 08:39:03 crc kubenswrapper[4711]: I0123 08:39:03.947722 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-galera-0" event={"ID":"a21f5317-eee2-4f13-9df5-40c48bce5aaf","Type":"ContainerStarted","Data":"6a0a50d0ae3b24b7b377a0cd7d4a03758c7437d07aeb6d283c02a8ba51dc27e9"}
Jan 23 08:39:03 crc kubenswrapper[4711]: I0123 08:39:03.949199 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/memcached-0" event={"ID":"e899df0c-3fb3-4d7b-b376-0a907dbc82a0","Type":"ContainerStarted","Data":"ec34e100380a4fa04d6d51480175338095bb071906994e75335c192dc7c2ca42"}
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.065637 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/openstack-cell1-galera-0"]
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.067544 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.070500 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"openstack-cell1-config-data"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.070562 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"galera-openstack-cell1-dockercfg-wpzk7"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.070745 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"cert-galera-openstack-cell1-svc"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.071015 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"openstack-cell1-scripts"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.074581 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/openstack-cell1-galera-0"]
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.179517 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/13ccba34-03d8-4429-bace-b75cb5d12763-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.179589 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13ccba34-03d8-4429-bace-b75cb5d12763-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.179713 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/13ccba34-03d8-4429-bace-b75cb5d12763-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.179755 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13ccba34-03d8-4429-bace-b75cb5d12763-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.179853 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/13ccba34-03d8-4429-bace-b75cb5d12763-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.179896 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-a90bb5d0-7db7-4b2a-ad00-8fb6bbc800f8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a90bb5d0-7db7-4b2a-ad00-8fb6bbc800f8\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.179938 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4srcf\" (UniqueName: \"kubernetes.io/projected/13ccba34-03d8-4429-bace-b75cb5d12763-kube-api-access-4srcf\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.180018 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/13ccba34-03d8-4429-bace-b75cb5d12763-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.281765 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/13ccba34-03d8-4429-bace-b75cb5d12763-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.281819 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/13ccba34-03d8-4429-bace-b75cb5d12763-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.281842 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13ccba34-03d8-4429-bace-b75cb5d12763-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.281878 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/13ccba34-03d8-4429-bace-b75cb5d12763-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.281893 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13ccba34-03d8-4429-bace-b75cb5d12763-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.281911 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/13ccba34-03d8-4429-bace-b75cb5d12763-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.281969 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-a90bb5d0-7db7-4b2a-ad00-8fb6bbc800f8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a90bb5d0-7db7-4b2a-ad00-8fb6bbc800f8\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.281990 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4srcf\" (UniqueName: \"kubernetes.io/projected/13ccba34-03d8-4429-bace-b75cb5d12763-kube-api-access-4srcf\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.282317 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/13ccba34-03d8-4429-bace-b75cb5d12763-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.283250 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/13ccba34-03d8-4429-bace-b75cb5d12763-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.284188 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/13ccba34-03d8-4429-bace-b75cb5d12763-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.284497 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13ccba34-03d8-4429-bace-b75cb5d12763-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.285106 4711 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.285155 4711 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-a90bb5d0-7db7-4b2a-ad00-8fb6bbc800f8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a90bb5d0-7db7-4b2a-ad00-8fb6bbc800f8\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a962a054baa30bb2a751fb4a2fc76ace328488e56e5835e39431e58c1fa65cfb/globalmount\"" pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.289523 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13ccba34-03d8-4429-bace-b75cb5d12763-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.300393 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/13ccba34-03d8-4429-bace-b75cb5d12763-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.304569 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4srcf\" (UniqueName: \"kubernetes.io/projected/13ccba34-03d8-4429-bace-b75cb5d12763-kube-api-access-4srcf\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.314789 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-a90bb5d0-7db7-4b2a-ad00-8fb6bbc800f8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a90bb5d0-7db7-4b2a-ad00-8fb6bbc800f8\") pod \"openstack-cell1-galera-0\" (UID: \"13ccba34-03d8-4429-bace-b75cb5d12763\") " pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.426278 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.875693 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/openstack-cell1-galera-0"]
Jan 23 08:39:04 crc kubenswrapper[4711]: W0123 08:39:04.882989 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13ccba34_03d8_4429_bace_b75cb5d12763.slice/crio-49caad9c443b4237e8df82e4b263ec491964e19a6fd6ed377b1ecfa131b7b271 WatchSource:0}: Error finding container 49caad9c443b4237e8df82e4b263ec491964e19a6fd6ed377b1ecfa131b7b271: Status 404 returned error can't find the container with id 49caad9c443b4237e8df82e4b263ec491964e19a6fd6ed377b1ecfa131b7b271
Jan 23 08:39:04 crc kubenswrapper[4711]: I0123 08:39:04.957444 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-cell1-galera-0" event={"ID":"13ccba34-03d8-4429-bace-b75cb5d12763","Type":"ContainerStarted","Data":"49caad9c443b4237e8df82e4b263ec491964e19a6fd6ed377b1ecfa131b7b271"}
Jan 23 08:39:19 crc kubenswrapper[4711]: E0123 08:39:19.021753 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached:current-podified"
Jan 23 08:39:19 crc kubenswrapper[4711]: E0123 08:39:19.022898 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,Command:[/usr/bin/dumb-init -- /usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n685hffh669h549h684h659h557h68h595hdhf5hbdhb5h55dh585h5c9h5f6h69hffhf9hf7h5d4h5cfhd6h9ch86hbch69h68bhfch656h67fq,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xl828,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_nova-kuttl-default(e899df0c-3fb3-4d7b-b376-0a907dbc82a0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 23 08:39:19 crc kubenswrapper[4711]: E0123 08:39:19.024116 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="nova-kuttl-default/memcached-0" podUID="e899df0c-3fb3-4d7b-b376-0a907dbc82a0"
Jan 23 08:39:19 crc kubenswrapper[4711]: E0123 08:39:19.331696 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached:current-podified\\\"\"" pod="nova-kuttl-default/memcached-0" podUID="e899df0c-3fb3-4d7b-b376-0a907dbc82a0"
Jan 23 08:39:20 crc kubenswrapper[4711]: E0123 08:39:20.201635 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified"
Jan 23 08:39:20 crc kubenswrapper[4711]: E0123 08:39:20.201812 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tcb5l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000710000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-broadcaster-server-0_nova-kuttl-default(b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 23 08:39:20 crc kubenswrapper[4711]: E0123 08:39:20.206607 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" podUID="b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf"
Jan 23 08:39:20 crc kubenswrapper[4711]: E0123 08:39:20.248643 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified"
Jan 23 08:39:20 crc kubenswrapper[4711]: E0123 08:39:20.249381 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-flrbn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000710000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_nova-kuttl-default(f970c1db-48d5-4b49-afc1-eee7e1289da9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 23 08:39:20 crc kubenswrapper[4711]: E0123 08:39:20.255610 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="nova-kuttl-default/rabbitmq-server-0" podUID="f970c1db-48d5-4b49-afc1-eee7e1289da9"
Jan 23 08:39:20 crc kubenswrapper[4711]: E0123 08:39:20.256454 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified"
Jan 23 08:39:20 crc kubenswrapper[4711]: E0123 08:39:20.256650 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-md2h4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000710000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_nova-kuttl-default(00e53f46-c48c-4f2c-83aa-088781b82d46): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 23 08:39:20 crc kubenswrapper[4711]: E0123 08:39:20.257884 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="nova-kuttl-default/rabbitmq-cell1-server-0" podUID="00e53f46-c48c-4f2c-83aa-088781b82d46"
Jan 23 08:39:20 crc kubenswrapper[4711]: E0123 08:39:20.337079 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="nova-kuttl-default/rabbitmq-cell1-server-0" podUID="00e53f46-c48c-4f2c-83aa-088781b82d46"
Jan 23 08:39:20 crc kubenswrapper[4711]: E0123 08:39:20.337121 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="nova-kuttl-default/rabbitmq-server-0" podUID="f970c1db-48d5-4b49-afc1-eee7e1289da9"
Jan 23 08:39:20 crc kubenswrapper[4711]: E0123 08:39:20.337635 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" podUID="b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf"
Jan 23 08:39:21 crc kubenswrapper[4711]: I0123 08:39:21.342996 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-cell1-galera-0" event={"ID":"13ccba34-03d8-4429-bace-b75cb5d12763","Type":"ContainerStarted","Data":"ccac8d097a7e6e7efe1cd1b2259883a9bfe64ec8a45d2c23921ccc572d324748"}
Jan 23 08:39:21 crc kubenswrapper[4711]: I0123 08:39:21.345227 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-galera-0" event={"ID":"a21f5317-eee2-4f13-9df5-40c48bce5aaf","Type":"ContainerStarted","Data":"8e8bdb54c2e89879a42f4cecd32ac380318888da30e47876dc5996edb2ab5291"}
Jan 23 08:39:25 crc kubenswrapper[4711]: I0123 08:39:25.373789 4711 generic.go:334] "Generic (PLEG): container finished" podID="13ccba34-03d8-4429-bace-b75cb5d12763" containerID="ccac8d097a7e6e7efe1cd1b2259883a9bfe64ec8a45d2c23921ccc572d324748" exitCode=0
Jan 23 08:39:25 crc kubenswrapper[4711]: I0123 08:39:25.373865 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-cell1-galera-0" event={"ID":"13ccba34-03d8-4429-bace-b75cb5d12763","Type":"ContainerDied","Data":"ccac8d097a7e6e7efe1cd1b2259883a9bfe64ec8a45d2c23921ccc572d324748"}
Jan 23 08:39:25 crc kubenswrapper[4711]: I0123 08:39:25.376813 4711 generic.go:334] "Generic (PLEG): container finished" podID="a21f5317-eee2-4f13-9df5-40c48bce5aaf" containerID="8e8bdb54c2e89879a42f4cecd32ac380318888da30e47876dc5996edb2ab5291" exitCode=0
Jan 23 08:39:25 crc kubenswrapper[4711]: I0123 08:39:25.376844 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-galera-0" event={"ID":"a21f5317-eee2-4f13-9df5-40c48bce5aaf","Type":"ContainerDied","Data":"8e8bdb54c2e89879a42f4cecd32ac380318888da30e47876dc5996edb2ab5291"}
Jan 23 08:39:26 crc kubenswrapper[4711]: I0123 08:39:26.388366 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-cell1-galera-0" event={"ID":"13ccba34-03d8-4429-bace-b75cb5d12763","Type":"ContainerStarted","Data":"42cd7829df23daaa17191299d32684ebccf9616812f941d1c259b6d27799f80e"}
Jan 23 08:39:26 crc kubenswrapper[4711]: I0123 08:39:26.390609 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-galera-0" event={"ID":"a21f5317-eee2-4f13-9df5-40c48bce5aaf","Type":"ContainerStarted","Data":"6f43634da2dbfda49d1ed465083ced90dae92d16eeb5f2a379b89bb97fdfc38b"}
Jan 23 08:39:26 crc kubenswrapper[4711]: I0123 08:39:26.424915 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/openstack-cell1-galera-0" podStartSLOduration=8.104678625 podStartE2EDuration="23.42488319s" podCreationTimestamp="2026-01-23 08:39:03 +0000 UTC" firstStartedPulling="2026-01-23 08:39:04.887040102 +0000 UTC m=+1130.459996470" lastFinishedPulling="2026-01-23 08:39:20.207244667 +0000 UTC m=+1145.780201035" observedRunningTime="2026-01-23 08:39:26.413130533 +0000 UTC m=+1151.986086901" watchObservedRunningTime="2026-01-23 08:39:26.42488319 +0000 UTC m=+1151.997839578"
Jan 23 08:39:26 crc kubenswrapper[4711]: I0123 08:39:26.454805 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/openstack-galera-0" podStartSLOduration=8.964620485 podStartE2EDuration="25.454771971s" podCreationTimestamp="2026-01-23 08:39:01 +0000 UTC" firstStartedPulling="2026-01-23 08:39:03.765499234 +0000 UTC m=+1129.338455602" lastFinishedPulling="2026-01-23 08:39:20.25565071 +0000 UTC m=+1145.828607088" observedRunningTime="2026-01-23 08:39:26.437612851 +0000 UTC m=+1152.010569219" watchObservedRunningTime="2026-01-23 08:39:26.454771971 +0000 UTC m=+1152.027728349"
Jan 23 08:39:33 crc kubenswrapper[4711]: I0123 08:39:33.076304 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:33 crc kubenswrapper[4711]: I0123 08:39:33.077670 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:34 crc kubenswrapper[4711]: I0123 08:39:34.427781 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:34 crc kubenswrapper[4711]: I0123 08:39:34.428100 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:35 crc kubenswrapper[4711]: I0123 08:39:35.346915 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:35 crc kubenswrapper[4711]: I0123 08:39:35.429931 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/openstack-cell1-galera-0"
Jan 23 08:39:35 crc kubenswrapper[4711]: I0123 08:39:35.463325 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/memcached-0" event={"ID":"e899df0c-3fb3-4d7b-b376-0a907dbc82a0","Type":"ContainerStarted","Data":"9fbbf7f91d903627c0a556a81a20471789b5dbffd6cb7fd13170bbf0ce77f79b"}
Jan 23 08:39:35 crc kubenswrapper[4711]: I0123 08:39:35.500993 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/memcached-0" podStartSLOduration=2.649334591 podStartE2EDuration="33.500963234s" podCreationTimestamp="2026-01-23 08:39:02 +0000 UTC" firstStartedPulling="2026-01-23 08:39:03.832651165 +0000 UTC m=+1129.405607533" lastFinishedPulling="2026-01-23 08:39:34.684279808 +0000 UTC m=+1160.257236176" observedRunningTime="2026-01-23 08:39:35.493223965 +0000 UTC m=+1161.066180333" watchObservedRunningTime="2026-01-23 08:39:35.500963234 +0000 UTC m=+1161.073919612"
Jan 23 08:39:37 crc kubenswrapper[4711]: I0123 08:39:37.189475 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:37 crc kubenswrapper[4711]: I0123 08:39:37.256578 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/openstack-galera-0"
Jan 23 08:39:37 crc kubenswrapper[4711]: I0123 08:39:37.481761 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-server-0" event={"ID":"f970c1db-48d5-4b49-afc1-eee7e1289da9","Type":"ContainerStarted","Data":"eb6682a48c60e2b6d9a2e6be3d59b5fc1082f4615530c40f87cd6b782a639fd7"}
Jan 23 08:39:37 crc kubenswrapper[4711]: I0123 08:39:37.481812 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" event={"ID":"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf","Type":"ContainerStarted","Data":"7afc35d31f32a8f230f06c62d2a4b358950b7b4c403215810e3f8d528f90df85"}
Jan 23 08:39:37 crc kubenswrapper[4711]: I0123 08:39:37.481984 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-cell1-server-0" event={"ID":"00e53f46-c48c-4f2c-83aa-088781b82d46","Type":"ContainerStarted","Data":"edc7556801c37d653be8fa24490d02038dff85bb1351aa805c115fa4c1f52570"}
Jan 23 08:39:38 crc kubenswrapper[4711]: I0123 08:39:38.300072 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/memcached-0"
Jan 23 08:39:41 crc kubenswrapper[4711]: I0123 08:39:41.535368 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/root-account-create-update-bjxvd"]
Jan 23 08:39:41 crc kubenswrapper[4711]: I0123 08:39:41.536709 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-bjxvd"
Jan 23 08:39:41 crc kubenswrapper[4711]: I0123 08:39:41.539807 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"openstack-mariadb-root-db-secret"
Jan 23 08:39:41 crc kubenswrapper[4711]: I0123 08:39:41.549559 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/root-account-create-update-bjxvd"]
Jan 23 08:39:41 crc kubenswrapper[4711]: I0123 08:39:41.639719 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjw6t\" (UniqueName: \"kubernetes.io/projected/2d95b148-4049-4cf7-96df-f3c712abe269-kube-api-access-rjw6t\") pod \"root-account-create-update-bjxvd\" (UID: \"2d95b148-4049-4cf7-96df-f3c712abe269\") " pod="nova-kuttl-default/root-account-create-update-bjxvd"
Jan 23 08:39:41 crc kubenswrapper[4711]: I0123 08:39:41.639875 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d95b148-4049-4cf7-96df-f3c712abe269-operator-scripts\") pod \"root-account-create-update-bjxvd\" (UID: \"2d95b148-4049-4cf7-96df-f3c712abe269\") " pod="nova-kuttl-default/root-account-create-update-bjxvd"
Jan 23 08:39:41 crc kubenswrapper[4711]: I0123 08:39:41.741731 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d95b148-4049-4cf7-96df-f3c712abe269-operator-scripts\") pod \"root-account-create-update-bjxvd\" (UID: \"2d95b148-4049-4cf7-96df-f3c712abe269\") " pod="nova-kuttl-default/root-account-create-update-bjxvd"
Jan 23 08:39:41 crc kubenswrapper[4711]: I0123 08:39:41.741916 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjw6t\" (UniqueName: \"kubernetes.io/projected/2d95b148-4049-4cf7-96df-f3c712abe269-kube-api-access-rjw6t\") pod \"root-account-create-update-bjxvd\" (UID: \"2d95b148-4049-4cf7-96df-f3c712abe269\") " pod="nova-kuttl-default/root-account-create-update-bjxvd"
Jan 23 08:39:41 crc kubenswrapper[4711]: I0123 08:39:41.742744 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d95b148-4049-4cf7-96df-f3c712abe269-operator-scripts\") pod \"root-account-create-update-bjxvd\" (UID: \"2d95b148-4049-4cf7-96df-f3c712abe269\") " pod="nova-kuttl-default/root-account-create-update-bjxvd"
Jan 23 08:39:41 crc kubenswrapper[4711]: I0123 08:39:41.770741 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjw6t\" (UniqueName: \"kubernetes.io/projected/2d95b148-4049-4cf7-96df-f3c712abe269-kube-api-access-rjw6t\") pod \"root-account-create-update-bjxvd\" (UID: \"2d95b148-4049-4cf7-96df-f3c712abe269\") " pod="nova-kuttl-default/root-account-create-update-bjxvd"
Jan 23 08:39:41 crc kubenswrapper[4711]: I0123 08:39:41.857965 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-bjxvd"
Jan 23 08:39:42 crc kubenswrapper[4711]: I0123 08:39:42.420002 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/root-account-create-update-bjxvd"]
Jan 23 08:39:42 crc kubenswrapper[4711]: I0123 08:39:42.517962 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-bjxvd" event={"ID":"2d95b148-4049-4cf7-96df-f3c712abe269","Type":"ContainerStarted","Data":"de1eb5510da4de04b858a5237b24e854fb65cf1d0ae553d07982e27e32818877"}
Jan 23 08:39:42 crc kubenswrapper[4711]: I0123 08:39:42.897488 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/keystone-db-create-hdxrj"]
Jan 23 08:39:42 crc kubenswrapper[4711]: I0123 08:39:42.898755 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-db-create-hdxrj"
Jan 23 08:39:42 crc kubenswrapper[4711]: I0123 08:39:42.913433 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/keystone-8a69-account-create-update-w6dvl"]
Jan 23 08:39:42 crc kubenswrapper[4711]: I0123 08:39:42.914746 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-8a69-account-create-update-w6dvl"
Jan 23 08:39:42 crc kubenswrapper[4711]: I0123 08:39:42.918446 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-db-secret"
Jan 23 08:39:42 crc kubenswrapper[4711]: I0123 08:39:42.934501 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-db-create-hdxrj"]
Jan 23 08:39:42 crc kubenswrapper[4711]: I0123 08:39:42.942996 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-8a69-account-create-update-w6dvl"]
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.063260 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qjn6\" (UniqueName: \"kubernetes.io/projected/9a72521f-2374-401f-99fe-a4c1c9a06aef-kube-api-access-2qjn6\") pod \"keystone-8a69-account-create-update-w6dvl\" (UID: \"9a72521f-2374-401f-99fe-a4c1c9a06aef\") " pod="nova-kuttl-default/keystone-8a69-account-create-update-w6dvl"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.063384 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwzgn\" (UniqueName: \"kubernetes.io/projected/4af8008c-c038-45dd-9384-485e4ba2c730-kube-api-access-pwzgn\") pod \"keystone-db-create-hdxrj\" (UID: \"4af8008c-c038-45dd-9384-485e4ba2c730\") " pod="nova-kuttl-default/keystone-db-create-hdxrj"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.063440 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4af8008c-c038-45dd-9384-485e4ba2c730-operator-scripts\") pod \"keystone-db-create-hdxrj\" (UID: \"4af8008c-c038-45dd-9384-485e4ba2c730\") " pod="nova-kuttl-default/keystone-db-create-hdxrj"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.063575 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a72521f-2374-401f-99fe-a4c1c9a06aef-operator-scripts\") pod \"keystone-8a69-account-create-update-w6dvl\" (UID: \"9a72521f-2374-401f-99fe-a4c1c9a06aef\") " pod="nova-kuttl-default/keystone-8a69-account-create-update-w6dvl"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.165071 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qjn6\" (UniqueName: \"kubernetes.io/projected/9a72521f-2374-401f-99fe-a4c1c9a06aef-kube-api-access-2qjn6\") pod \"keystone-8a69-account-create-update-w6dvl\" (UID: \"9a72521f-2374-401f-99fe-a4c1c9a06aef\") " pod="nova-kuttl-default/keystone-8a69-account-create-update-w6dvl"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.165188 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwzgn\" (UniqueName: \"kubernetes.io/projected/4af8008c-c038-45dd-9384-485e4ba2c730-kube-api-access-pwzgn\") pod \"keystone-db-create-hdxrj\" (UID: \"4af8008c-c038-45dd-9384-485e4ba2c730\") " pod="nova-kuttl-default/keystone-db-create-hdxrj"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.165247 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4af8008c-c038-45dd-9384-485e4ba2c730-operator-scripts\") pod \"keystone-db-create-hdxrj\" (UID: \"4af8008c-c038-45dd-9384-485e4ba2c730\") " pod="nova-kuttl-default/keystone-db-create-hdxrj"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.165289 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a72521f-2374-401f-99fe-a4c1c9a06aef-operator-scripts\") pod \"keystone-8a69-account-create-update-w6dvl\" (UID: \"9a72521f-2374-401f-99fe-a4c1c9a06aef\") " pod="nova-kuttl-default/keystone-8a69-account-create-update-w6dvl"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.166189 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a72521f-2374-401f-99fe-a4c1c9a06aef-operator-scripts\") pod \"keystone-8a69-account-create-update-w6dvl\" (UID: \"9a72521f-2374-401f-99fe-a4c1c9a06aef\") " pod="nova-kuttl-default/keystone-8a69-account-create-update-w6dvl"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.166190 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4af8008c-c038-45dd-9384-485e4ba2c730-operator-scripts\") pod \"keystone-db-create-hdxrj\" (UID: \"4af8008c-c038-45dd-9384-485e4ba2c730\") " pod="nova-kuttl-default/keystone-db-create-hdxrj"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.166269 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/placement-db-create-pgw2h"]
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.167287 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-db-create-pgw2h"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.177605 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-db-create-pgw2h"]
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.188139 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwzgn\" (UniqueName: \"kubernetes.io/projected/4af8008c-c038-45dd-9384-485e4ba2c730-kube-api-access-pwzgn\") pod \"keystone-db-create-hdxrj\" (UID: \"4af8008c-c038-45dd-9384-485e4ba2c730\") " pod="nova-kuttl-default/keystone-db-create-hdxrj"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.189992 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qjn6\" (UniqueName: \"kubernetes.io/projected/9a72521f-2374-401f-99fe-a4c1c9a06aef-kube-api-access-2qjn6\") pod \"keystone-8a69-account-create-update-w6dvl\" (UID: \"9a72521f-2374-401f-99fe-a4c1c9a06aef\") " pod="nova-kuttl-default/keystone-8a69-account-create-update-w6dvl"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.219312 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-db-create-hdxrj"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.239877 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-8a69-account-create-update-w6dvl"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.270607 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcq4g\" (UniqueName: \"kubernetes.io/projected/51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f-kube-api-access-tcq4g\") pod \"placement-db-create-pgw2h\" (UID: \"51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f\") " pod="nova-kuttl-default/placement-db-create-pgw2h"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.272184 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f-operator-scripts\") pod \"placement-db-create-pgw2h\" (UID: \"51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f\") " pod="nova-kuttl-default/placement-db-create-pgw2h"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.277310 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/placement-728b-account-create-update-nllmn"]
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.278460 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-728b-account-create-update-nllmn"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.280993 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-db-secret"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.284739 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-728b-account-create-update-nllmn"]
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.300709 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/memcached-0"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.373872 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15d700a2-d850-4643-b834-4bcad727b44e-operator-scripts\") pod \"placement-728b-account-create-update-nllmn\" (UID: \"15d700a2-d850-4643-b834-4bcad727b44e\") " pod="nova-kuttl-default/placement-728b-account-create-update-nllmn"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.374179 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcxwn\" (UniqueName: \"kubernetes.io/projected/15d700a2-d850-4643-b834-4bcad727b44e-kube-api-access-xcxwn\") pod \"placement-728b-account-create-update-nllmn\" (UID: \"15d700a2-d850-4643-b834-4bcad727b44e\") " pod="nova-kuttl-default/placement-728b-account-create-update-nllmn"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.374238 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f-operator-scripts\") pod \"placement-db-create-pgw2h\" (UID: \"51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f\") " pod="nova-kuttl-default/placement-db-create-pgw2h"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.374276 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcq4g\" (UniqueName: \"kubernetes.io/projected/51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f-kube-api-access-tcq4g\") pod \"placement-db-create-pgw2h\" (UID: \"51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f\") " pod="nova-kuttl-default/placement-db-create-pgw2h"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.374944 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f-operator-scripts\") pod \"placement-db-create-pgw2h\" (UID: \"51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f\") " pod="nova-kuttl-default/placement-db-create-pgw2h"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.395074 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcq4g\" (UniqueName: \"kubernetes.io/projected/51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f-kube-api-access-tcq4g\") pod \"placement-db-create-pgw2h\" (UID: \"51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f\") " pod="nova-kuttl-default/placement-db-create-pgw2h"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.477109 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15d700a2-d850-4643-b834-4bcad727b44e-operator-scripts\") pod \"placement-728b-account-create-update-nllmn\" (UID: \"15d700a2-d850-4643-b834-4bcad727b44e\") " pod="nova-kuttl-default/placement-728b-account-create-update-nllmn"
Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.477157 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcxwn\" (UniqueName: \"kubernetes.io/projected/15d700a2-d850-4643-b834-4bcad727b44e-kube-api-access-xcxwn\") pod \"placement-728b-account-create-update-nllmn\" (UID: \"15d700a2-d850-4643-b834-4bcad727b44e\") " pod="nova-kuttl-default/placement-728b-account-create-update-nllmn" Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.478682 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15d700a2-d850-4643-b834-4bcad727b44e-operator-scripts\") pod \"placement-728b-account-create-update-nllmn\" (UID: \"15d700a2-d850-4643-b834-4bcad727b44e\") " pod="nova-kuttl-default/placement-728b-account-create-update-nllmn" Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.484901 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-db-create-pgw2h" Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.496881 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcxwn\" (UniqueName: \"kubernetes.io/projected/15d700a2-d850-4643-b834-4bcad727b44e-kube-api-access-xcxwn\") pod \"placement-728b-account-create-update-nllmn\" (UID: \"15d700a2-d850-4643-b834-4bcad727b44e\") " pod="nova-kuttl-default/placement-728b-account-create-update-nllmn" Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.670990 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-728b-account-create-update-nllmn" Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.692757 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-db-create-hdxrj"] Jan 23 08:39:43 crc kubenswrapper[4711]: W0123 08:39:43.703105 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4af8008c_c038_45dd_9384_485e4ba2c730.slice/crio-a160b105f0786640014f6fb75f1b5895680efc8e28b7feca61bb98128f7c360c WatchSource:0}: Error finding container a160b105f0786640014f6fb75f1b5895680efc8e28b7feca61bb98128f7c360c: Status 404 returned error can't find the container with id a160b105f0786640014f6fb75f1b5895680efc8e28b7feca61bb98128f7c360c Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.828742 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-8a69-account-create-update-w6dvl"] Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.914766 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-db-create-pgw2h"] Jan 23 08:39:43 crc kubenswrapper[4711]: I0123 08:39:43.932775 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-728b-account-create-update-nllmn"] Jan 23 08:39:43 crc kubenswrapper[4711]: W0123 08:39:43.936375 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod15d700a2_d850_4643_b834_4bcad727b44e.slice/crio-1af4b8387e9d0cd9fb3efb10515d8efebb12b7846358a94c7ae5cc8cd9dccb1a WatchSource:0}: Error finding container 1af4b8387e9d0cd9fb3efb10515d8efebb12b7846358a94c7ae5cc8cd9dccb1a: Status 404 returned error can't find the container with id 1af4b8387e9d0cd9fb3efb10515d8efebb12b7846358a94c7ae5cc8cd9dccb1a Jan 23 08:39:44 crc kubenswrapper[4711]: I0123 08:39:44.534196 4711 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-bjxvd" event={"ID":"2d95b148-4049-4cf7-96df-f3c712abe269","Type":"ContainerStarted","Data":"474c0a893709ccd3774540e8c5f5671626f570c2396bd51bd189fd711a76167e"} Jan 23 08:39:44 crc kubenswrapper[4711]: I0123 08:39:44.536437 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-728b-account-create-update-nllmn" event={"ID":"15d700a2-d850-4643-b834-4bcad727b44e","Type":"ContainerStarted","Data":"b639ef42c087042ed6a0a326c87278d2598797e4fe2cca8142d6af4508176d31"} Jan 23 08:39:44 crc kubenswrapper[4711]: I0123 08:39:44.536465 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-728b-account-create-update-nllmn" event={"ID":"15d700a2-d850-4643-b834-4bcad727b44e","Type":"ContainerStarted","Data":"1af4b8387e9d0cd9fb3efb10515d8efebb12b7846358a94c7ae5cc8cd9dccb1a"} Jan 23 08:39:44 crc kubenswrapper[4711]: I0123 08:39:44.538852 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-8a69-account-create-update-w6dvl" event={"ID":"9a72521f-2374-401f-99fe-a4c1c9a06aef","Type":"ContainerStarted","Data":"b3f7b13c724fc52560795c7721a240709dc048102991934e8ec91a967703dbb0"} Jan 23 08:39:44 crc kubenswrapper[4711]: I0123 08:39:44.538906 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-8a69-account-create-update-w6dvl" event={"ID":"9a72521f-2374-401f-99fe-a4c1c9a06aef","Type":"ContainerStarted","Data":"77b00f4477a4d4cb0e4ede8b62d4a70a80f24a4f7459f375326519c0bb193116"} Jan 23 08:39:44 crc kubenswrapper[4711]: I0123 08:39:44.540364 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-create-hdxrj" event={"ID":"4af8008c-c038-45dd-9384-485e4ba2c730","Type":"ContainerStarted","Data":"e854188d28149236b862fc1b15d0c857f169663ac94ebaae065f1a2acf040996"} Jan 23 08:39:44 crc kubenswrapper[4711]: I0123 08:39:44.540396 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-create-hdxrj" event={"ID":"4af8008c-c038-45dd-9384-485e4ba2c730","Type":"ContainerStarted","Data":"a160b105f0786640014f6fb75f1b5895680efc8e28b7feca61bb98128f7c360c"} Jan 23 08:39:44 crc kubenswrapper[4711]: I0123 08:39:44.542619 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-create-pgw2h" event={"ID":"51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f","Type":"ContainerStarted","Data":"ca0537f0e82aae5a9122cafb35004a42a6d9435e0a8430695366b0d9e42d63b3"} Jan 23 08:39:44 crc kubenswrapper[4711]: I0123 08:39:44.542651 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-create-pgw2h" event={"ID":"51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f","Type":"ContainerStarted","Data":"148be73c41eb0ceee6c25f2a76e37a1ccedad928beeb60752bf173fe9b30595d"} Jan 23 08:39:44 crc kubenswrapper[4711]: I0123 08:39:44.549955 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/root-account-create-update-bjxvd" podStartSLOduration=3.549932765 podStartE2EDuration="3.549932765s" podCreationTimestamp="2026-01-23 08:39:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:39:44.547237069 +0000 UTC m=+1170.120193437" watchObservedRunningTime="2026-01-23 08:39:44.549932765 +0000 UTC m=+1170.122889133" Jan 23 08:39:44 crc kubenswrapper[4711]: I0123 08:39:44.573593 4711 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/keystone-8a69-account-create-update-w6dvl" podStartSLOduration=2.573564533 podStartE2EDuration="2.573564533s" podCreationTimestamp="2026-01-23 08:39:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:39:44.559777756 +0000 UTC m=+1170.132734124" watchObservedRunningTime="2026-01-23 08:39:44.573564533 +0000 UTC m=+1170.146521131" Jan 23 08:39:44 crc kubenswrapper[4711]: I0123 08:39:44.594705 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/keystone-db-create-hdxrj" podStartSLOduration=2.594688369 podStartE2EDuration="2.594688369s" podCreationTimestamp="2026-01-23 08:39:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:39:44.591992493 +0000 UTC m=+1170.164948861" watchObservedRunningTime="2026-01-23 08:39:44.594688369 +0000 UTC m=+1170.167644737" Jan 23 08:39:44 crc kubenswrapper[4711]: I0123 08:39:44.607866 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/placement-db-create-pgw2h" podStartSLOduration=1.6078497619999998 podStartE2EDuration="1.607849762s" podCreationTimestamp="2026-01-23 08:39:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:39:44.607352549 +0000 UTC m=+1170.180308917" watchObservedRunningTime="2026-01-23 08:39:44.607849762 +0000 UTC m=+1170.180806130" Jan 23 08:39:44 crc kubenswrapper[4711]: I0123 08:39:44.631214 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/placement-728b-account-create-update-nllmn" podStartSLOduration=1.631177911 podStartE2EDuration="1.631177911s" podCreationTimestamp="2026-01-23 08:39:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:39:44.621633659 +0000 UTC m=+1170.194590037" watchObservedRunningTime="2026-01-23 08:39:44.631177911 +0000 UTC m=+1170.204134279" Jan 23 08:39:45 crc kubenswrapper[4711]: I0123 08:39:45.551849 4711 generic.go:334] "Generic (PLEG): container finished" podID="51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f" containerID="ca0537f0e82aae5a9122cafb35004a42a6d9435e0a8430695366b0d9e42d63b3" exitCode=0 Jan 23 08:39:45 crc kubenswrapper[4711]: I0123 08:39:45.552047 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-create-pgw2h" event={"ID":"51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f","Type":"ContainerDied","Data":"ca0537f0e82aae5a9122cafb35004a42a6d9435e0a8430695366b0d9e42d63b3"} Jan 23 08:39:45 crc kubenswrapper[4711]: I0123 08:39:45.553354 4711 generic.go:334] "Generic (PLEG): container finished" podID="2d95b148-4049-4cf7-96df-f3c712abe269" containerID="474c0a893709ccd3774540e8c5f5671626f570c2396bd51bd189fd711a76167e" exitCode=0 Jan 23 08:39:45 crc kubenswrapper[4711]: I0123 08:39:45.553435 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-bjxvd" event={"ID":"2d95b148-4049-4cf7-96df-f3c712abe269","Type":"ContainerDied","Data":"474c0a893709ccd3774540e8c5f5671626f570c2396bd51bd189fd711a76167e"} Jan 23 08:39:45 crc kubenswrapper[4711]: I0123 08:39:45.568812 4711 generic.go:334] "Generic (PLEG): container finished" 
podID="15d700a2-d850-4643-b834-4bcad727b44e" containerID="b639ef42c087042ed6a0a326c87278d2598797e4fe2cca8142d6af4508176d31" exitCode=0 Jan 23 08:39:45 crc kubenswrapper[4711]: I0123 08:39:45.568922 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-728b-account-create-update-nllmn" event={"ID":"15d700a2-d850-4643-b834-4bcad727b44e","Type":"ContainerDied","Data":"b639ef42c087042ed6a0a326c87278d2598797e4fe2cca8142d6af4508176d31"} Jan 23 08:39:45 crc kubenswrapper[4711]: I0123 08:39:45.571468 4711 generic.go:334] "Generic (PLEG): container finished" podID="9a72521f-2374-401f-99fe-a4c1c9a06aef" containerID="b3f7b13c724fc52560795c7721a240709dc048102991934e8ec91a967703dbb0" exitCode=0 Jan 23 08:39:45 crc kubenswrapper[4711]: I0123 08:39:45.571567 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-8a69-account-create-update-w6dvl" event={"ID":"9a72521f-2374-401f-99fe-a4c1c9a06aef","Type":"ContainerDied","Data":"b3f7b13c724fc52560795c7721a240709dc048102991934e8ec91a967703dbb0"} Jan 23 08:39:45 crc kubenswrapper[4711]: I0123 08:39:45.573745 4711 generic.go:334] "Generic (PLEG): container finished" podID="4af8008c-c038-45dd-9384-485e4ba2c730" containerID="e854188d28149236b862fc1b15d0c857f169663ac94ebaae065f1a2acf040996" exitCode=0 Jan 23 08:39:45 crc kubenswrapper[4711]: I0123 08:39:45.573801 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-create-hdxrj" event={"ID":"4af8008c-c038-45dd-9384-485e4ba2c730","Type":"ContainerDied","Data":"e854188d28149236b862fc1b15d0c857f169663ac94ebaae065f1a2acf040996"} Jan 23 08:39:46 crc kubenswrapper[4711]: I0123 08:39:46.924966 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-8a69-account-create-update-w6dvl" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.034396 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-bjxvd" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.043350 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-db-create-pgw2h" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.044521 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qjn6\" (UniqueName: \"kubernetes.io/projected/9a72521f-2374-401f-99fe-a4c1c9a06aef-kube-api-access-2qjn6\") pod \"9a72521f-2374-401f-99fe-a4c1c9a06aef\" (UID: \"9a72521f-2374-401f-99fe-a4c1c9a06aef\") " Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.044602 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a72521f-2374-401f-99fe-a4c1c9a06aef-operator-scripts\") pod \"9a72521f-2374-401f-99fe-a4c1c9a06aef\" (UID: \"9a72521f-2374-401f-99fe-a4c1c9a06aef\") " Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.045239 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a72521f-2374-401f-99fe-a4c1c9a06aef-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9a72521f-2374-401f-99fe-a4c1c9a06aef" (UID: "9a72521f-2374-401f-99fe-a4c1c9a06aef"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.055655 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a72521f-2374-401f-99fe-a4c1c9a06aef-kube-api-access-2qjn6" (OuterVolumeSpecName: "kube-api-access-2qjn6") pod "9a72521f-2374-401f-99fe-a4c1c9a06aef" (UID: "9a72521f-2374-401f-99fe-a4c1c9a06aef"). InnerVolumeSpecName "kube-api-access-2qjn6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.133871 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-db-create-hdxrj" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.141331 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-728b-account-create-update-nllmn" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.145968 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f-operator-scripts\") pod \"51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f\" (UID: \"51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f\") " Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.146078 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjw6t\" (UniqueName: \"kubernetes.io/projected/2d95b148-4049-4cf7-96df-f3c712abe269-kube-api-access-rjw6t\") pod \"2d95b148-4049-4cf7-96df-f3c712abe269\" (UID: \"2d95b148-4049-4cf7-96df-f3c712abe269\") " Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.146196 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcq4g\" (UniqueName: \"kubernetes.io/projected/51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f-kube-api-access-tcq4g\") pod \"51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f\" (UID: \"51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f\") " Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.146260 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d95b148-4049-4cf7-96df-f3c712abe269-operator-scripts\") pod \"2d95b148-4049-4cf7-96df-f3c712abe269\" (UID: \"2d95b148-4049-4cf7-96df-f3c712abe269\") " Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.146832 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f" (UID: "51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.147782 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d95b148-4049-4cf7-96df-f3c712abe269-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2d95b148-4049-4cf7-96df-f3c712abe269" (UID: "2d95b148-4049-4cf7-96df-f3c712abe269"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.148610 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qjn6\" (UniqueName: \"kubernetes.io/projected/9a72521f-2374-401f-99fe-a4c1c9a06aef-kube-api-access-2qjn6\") on node \"crc\" DevicePath \"\"" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.148642 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a72521f-2374-401f-99fe-a4c1c9a06aef-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.149385 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d95b148-4049-4cf7-96df-f3c712abe269-kube-api-access-rjw6t" (OuterVolumeSpecName: "kube-api-access-rjw6t") pod "2d95b148-4049-4cf7-96df-f3c712abe269" (UID: "2d95b148-4049-4cf7-96df-f3c712abe269"). InnerVolumeSpecName "kube-api-access-rjw6t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.151582 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f-kube-api-access-tcq4g" (OuterVolumeSpecName: "kube-api-access-tcq4g") pod "51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f" (UID: "51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f"). InnerVolumeSpecName "kube-api-access-tcq4g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.249435 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4af8008c-c038-45dd-9384-485e4ba2c730-operator-scripts\") pod \"4af8008c-c038-45dd-9384-485e4ba2c730\" (UID: \"4af8008c-c038-45dd-9384-485e4ba2c730\") " Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.249498 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwzgn\" (UniqueName: \"kubernetes.io/projected/4af8008c-c038-45dd-9384-485e4ba2c730-kube-api-access-pwzgn\") pod \"4af8008c-c038-45dd-9384-485e4ba2c730\" (UID: \"4af8008c-c038-45dd-9384-485e4ba2c730\") " Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.249576 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcxwn\" (UniqueName: \"kubernetes.io/projected/15d700a2-d850-4643-b834-4bcad727b44e-kube-api-access-xcxwn\") pod \"15d700a2-d850-4643-b834-4bcad727b44e\" (UID: \"15d700a2-d850-4643-b834-4bcad727b44e\") " Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.249624 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15d700a2-d850-4643-b834-4bcad727b44e-operator-scripts\") pod \"15d700a2-d850-4643-b834-4bcad727b44e\" (UID: \"15d700a2-d850-4643-b834-4bcad727b44e\") " Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.249994 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.250005 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjw6t\" (UniqueName: \"kubernetes.io/projected/2d95b148-4049-4cf7-96df-f3c712abe269-kube-api-access-rjw6t\") on node \"crc\" DevicePath \"\"" Jan 23 
08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.250016 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcq4g\" (UniqueName: \"kubernetes.io/projected/51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f-kube-api-access-tcq4g\") on node \"crc\" DevicePath \"\"" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.250026 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d95b148-4049-4cf7-96df-f3c712abe269-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.250411 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15d700a2-d850-4643-b834-4bcad727b44e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "15d700a2-d850-4643-b834-4bcad727b44e" (UID: "15d700a2-d850-4643-b834-4bcad727b44e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.250807 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4af8008c-c038-45dd-9384-485e4ba2c730-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4af8008c-c038-45dd-9384-485e4ba2c730" (UID: "4af8008c-c038-45dd-9384-485e4ba2c730"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.256418 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15d700a2-d850-4643-b834-4bcad727b44e-kube-api-access-xcxwn" (OuterVolumeSpecName: "kube-api-access-xcxwn") pod "15d700a2-d850-4643-b834-4bcad727b44e" (UID: "15d700a2-d850-4643-b834-4bcad727b44e"). InnerVolumeSpecName "kube-api-access-xcxwn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.256722 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4af8008c-c038-45dd-9384-485e4ba2c730-kube-api-access-pwzgn" (OuterVolumeSpecName: "kube-api-access-pwzgn") pod "4af8008c-c038-45dd-9384-485e4ba2c730" (UID: "4af8008c-c038-45dd-9384-485e4ba2c730"). InnerVolumeSpecName "kube-api-access-pwzgn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.351355 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15d700a2-d850-4643-b834-4bcad727b44e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.351383 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4af8008c-c038-45dd-9384-485e4ba2c730-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.351393 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwzgn\" (UniqueName: \"kubernetes.io/projected/4af8008c-c038-45dd-9384-485e4ba2c730-kube-api-access-pwzgn\") on node \"crc\" DevicePath \"\"" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.351403 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcxwn\" (UniqueName: \"kubernetes.io/projected/15d700a2-d850-4643-b834-4bcad727b44e-kube-api-access-xcxwn\") on node \"crc\" DevicePath \"\"" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.606307 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-728b-account-create-update-nllmn" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.606302 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-728b-account-create-update-nllmn" event={"ID":"15d700a2-d850-4643-b834-4bcad727b44e","Type":"ContainerDied","Data":"1af4b8387e9d0cd9fb3efb10515d8efebb12b7846358a94c7ae5cc8cd9dccb1a"} Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.606843 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1af4b8387e9d0cd9fb3efb10515d8efebb12b7846358a94c7ae5cc8cd9dccb1a" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.609770 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-8a69-account-create-update-w6dvl" event={"ID":"9a72521f-2374-401f-99fe-a4c1c9a06aef","Type":"ContainerDied","Data":"77b00f4477a4d4cb0e4ede8b62d4a70a80f24a4f7459f375326519c0bb193116"} Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.610355 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="77b00f4477a4d4cb0e4ede8b62d4a70a80f24a4f7459f375326519c0bb193116" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.609869 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-8a69-account-create-update-w6dvl" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.612267 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-db-create-hdxrj" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.612445 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-create-hdxrj" event={"ID":"4af8008c-c038-45dd-9384-485e4ba2c730","Type":"ContainerDied","Data":"a160b105f0786640014f6fb75f1b5895680efc8e28b7feca61bb98128f7c360c"} Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.612546 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a160b105f0786640014f6fb75f1b5895680efc8e28b7feca61bb98128f7c360c" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.613957 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-create-pgw2h" event={"ID":"51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f","Type":"ContainerDied","Data":"148be73c41eb0ceee6c25f2a76e37a1ccedad928beeb60752bf173fe9b30595d"} Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.614078 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="148be73c41eb0ceee6c25f2a76e37a1ccedad928beeb60752bf173fe9b30595d" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.614781 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-db-create-pgw2h" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.616565 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-bjxvd" event={"ID":"2d95b148-4049-4cf7-96df-f3c712abe269","Type":"ContainerDied","Data":"de1eb5510da4de04b858a5237b24e854fb65cf1d0ae553d07982e27e32818877"} Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.616708 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de1eb5510da4de04b858a5237b24e854fb65cf1d0ae553d07982e27e32818877" Jan 23 08:39:47 crc kubenswrapper[4711]: I0123 08:39:47.616667 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/root-account-create-update-bjxvd" Jan 23 08:39:48 crc kubenswrapper[4711]: I0123 08:39:48.109887 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/root-account-create-update-bjxvd"] Jan 23 08:39:48 crc kubenswrapper[4711]: I0123 08:39:48.116140 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/root-account-create-update-bjxvd"] Jan 23 08:39:49 crc kubenswrapper[4711]: I0123 08:39:49.490157 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d95b148-4049-4cf7-96df-f3c712abe269" path="/var/lib/kubelet/pods/2d95b148-4049-4cf7-96df-f3c712abe269/volumes" Jan 23 08:39:51 crc kubenswrapper[4711]: I0123 08:39:51.558815 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/root-account-create-update-xqwbw"] Jan 23 08:39:51 crc kubenswrapper[4711]: E0123 08:39:51.559795 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f" containerName="mariadb-database-create" Jan 23 08:39:51 crc kubenswrapper[4711]: I0123 08:39:51.559814 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f" containerName="mariadb-database-create" Jan 23 08:39:51 crc kubenswrapper[4711]: E0123 08:39:51.559844 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d95b148-4049-4cf7-96df-f3c712abe269" containerName="mariadb-account-create-update" Jan 23 08:39:51 crc kubenswrapper[4711]: I0123 08:39:51.559852 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d95b148-4049-4cf7-96df-f3c712abe269" containerName="mariadb-account-create-update" Jan 23 08:39:51 crc kubenswrapper[4711]: E0123 08:39:51.559864 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15d700a2-d850-4643-b834-4bcad727b44e" containerName="mariadb-account-create-update" Jan 23 08:39:51 crc kubenswrapper[4711]: I0123 08:39:51.559872 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="15d700a2-d850-4643-b834-4bcad727b44e" containerName="mariadb-account-create-update" Jan 23 08:39:51 crc kubenswrapper[4711]: E0123 08:39:51.559885 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4af8008c-c038-45dd-9384-485e4ba2c730" containerName="mariadb-database-create" Jan 23 08:39:51 crc kubenswrapper[4711]: I0123 08:39:51.559893 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="4af8008c-c038-45dd-9384-485e4ba2c730" containerName="mariadb-database-create" Jan 23 08:39:51 crc kubenswrapper[4711]: E0123 08:39:51.559911 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a72521f-2374-401f-99fe-a4c1c9a06aef" containerName="mariadb-account-create-update" Jan 23 08:39:51 crc kubenswrapper[4711]: I0123 08:39:51.559919 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a72521f-2374-401f-99fe-a4c1c9a06aef" containerName="mariadb-account-create-update" Jan 23 08:39:51 crc kubenswrapper[4711]: I0123 08:39:51.560099 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f" containerName="mariadb-database-create" Jan 23 08:39:51 crc kubenswrapper[4711]: I0123 08:39:51.560114 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a72521f-2374-401f-99fe-a4c1c9a06aef" containerName="mariadb-account-create-update" Jan 23 08:39:51 crc kubenswrapper[4711]: I0123 08:39:51.560123 4711 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="15d700a2-d850-4643-b834-4bcad727b44e" containerName="mariadb-account-create-update" Jan 23 08:39:51 crc kubenswrapper[4711]: I0123 08:39:51.560143 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="4af8008c-c038-45dd-9384-485e4ba2c730" containerName="mariadb-database-create" Jan 23 08:39:51 crc kubenswrapper[4711]: I0123 08:39:51.560152 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d95b148-4049-4cf7-96df-f3c712abe269" containerName="mariadb-account-create-update" Jan 23 08:39:51 crc kubenswrapper[4711]: I0123 08:39:51.561302 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-xqwbw" Jan 23 08:39:51 crc kubenswrapper[4711]: I0123 08:39:51.564444 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"openstack-mariadb-root-db-secret" Jan 23 08:39:51 crc kubenswrapper[4711]: I0123 08:39:51.621104 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/root-account-create-update-xqwbw"] Jan 23 08:39:51 crc kubenswrapper[4711]: I0123 08:39:51.717199 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f29c800-7e46-463f-8416-46cdb5c1e5c2-operator-scripts\") pod \"root-account-create-update-xqwbw\" (UID: \"4f29c800-7e46-463f-8416-46cdb5c1e5c2\") " pod="nova-kuttl-default/root-account-create-update-xqwbw" Jan 23 08:39:51 crc kubenswrapper[4711]: I0123 08:39:51.717434 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jblt\" (UniqueName: \"kubernetes.io/projected/4f29c800-7e46-463f-8416-46cdb5c1e5c2-kube-api-access-5jblt\") pod \"root-account-create-update-xqwbw\" (UID: \"4f29c800-7e46-463f-8416-46cdb5c1e5c2\") " pod="nova-kuttl-default/root-account-create-update-xqwbw" Jan 23 08:39:51 crc kubenswrapper[4711]: I0123 08:39:51.819196 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jblt\" (UniqueName: \"kubernetes.io/projected/4f29c800-7e46-463f-8416-46cdb5c1e5c2-kube-api-access-5jblt\") pod \"root-account-create-update-xqwbw\" (UID: \"4f29c800-7e46-463f-8416-46cdb5c1e5c2\") " pod="nova-kuttl-default/root-account-create-update-xqwbw" Jan 23 08:39:51 crc kubenswrapper[4711]: I0123 08:39:51.819270 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f29c800-7e46-463f-8416-46cdb5c1e5c2-operator-scripts\") pod \"root-account-create-update-xqwbw\" (UID: \"4f29c800-7e46-463f-8416-46cdb5c1e5c2\") " pod="nova-kuttl-default/root-account-create-update-xqwbw" Jan 23 08:39:51 crc kubenswrapper[4711]: I0123 08:39:51.821081 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f29c800-7e46-463f-8416-46cdb5c1e5c2-operator-scripts\") pod \"root-account-create-update-xqwbw\" (UID: \"4f29c800-7e46-463f-8416-46cdb5c1e5c2\") " pod="nova-kuttl-default/root-account-create-update-xqwbw" Jan 23 08:39:51 crc kubenswrapper[4711]: I0123 08:39:51.840397 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jblt\" (UniqueName: \"kubernetes.io/projected/4f29c800-7e46-463f-8416-46cdb5c1e5c2-kube-api-access-5jblt\") pod \"root-account-create-update-xqwbw\" (UID: \"4f29c800-7e46-463f-8416-46cdb5c1e5c2\") " 
pod="nova-kuttl-default/root-account-create-update-xqwbw" Jan 23 08:39:51 crc kubenswrapper[4711]: I0123 08:39:51.939613 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-xqwbw" Jan 23 08:39:52 crc kubenswrapper[4711]: W0123 08:39:52.364239 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4f29c800_7e46_463f_8416_46cdb5c1e5c2.slice/crio-6612246ac61096a308d586c18ff4620b6c005aeae1565e1b37748b1b225c3cba WatchSource:0}: Error finding container 6612246ac61096a308d586c18ff4620b6c005aeae1565e1b37748b1b225c3cba: Status 404 returned error can't find the container with id 6612246ac61096a308d586c18ff4620b6c005aeae1565e1b37748b1b225c3cba Jan 23 08:39:52 crc kubenswrapper[4711]: I0123 08:39:52.368096 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/root-account-create-update-xqwbw"] Jan 23 08:39:52 crc kubenswrapper[4711]: I0123 08:39:52.651729 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-xqwbw" event={"ID":"4f29c800-7e46-463f-8416-46cdb5c1e5c2","Type":"ContainerStarted","Data":"6612246ac61096a308d586c18ff4620b6c005aeae1565e1b37748b1b225c3cba"} Jan 23 08:39:55 crc kubenswrapper[4711]: I0123 08:39:55.672720 4711 generic.go:334] "Generic (PLEG): container finished" podID="4f29c800-7e46-463f-8416-46cdb5c1e5c2" containerID="a921f124d3747adbc1d15710fc8da7574d650743d83e854cb2284108913735fa" exitCode=0 Jan 23 08:39:55 crc kubenswrapper[4711]: I0123 08:39:55.672833 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-xqwbw" event={"ID":"4f29c800-7e46-463f-8416-46cdb5c1e5c2","Type":"ContainerDied","Data":"a921f124d3747adbc1d15710fc8da7574d650743d83e854cb2284108913735fa"} Jan 23 08:39:56 crc kubenswrapper[4711]: I0123 08:39:56.994761 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-xqwbw" Jan 23 08:39:57 crc kubenswrapper[4711]: I0123 08:39:57.105347 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jblt\" (UniqueName: \"kubernetes.io/projected/4f29c800-7e46-463f-8416-46cdb5c1e5c2-kube-api-access-5jblt\") pod \"4f29c800-7e46-463f-8416-46cdb5c1e5c2\" (UID: \"4f29c800-7e46-463f-8416-46cdb5c1e5c2\") " Jan 23 08:39:57 crc kubenswrapper[4711]: I0123 08:39:57.105574 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f29c800-7e46-463f-8416-46cdb5c1e5c2-operator-scripts\") pod \"4f29c800-7e46-463f-8416-46cdb5c1e5c2\" (UID: \"4f29c800-7e46-463f-8416-46cdb5c1e5c2\") " Jan 23 08:39:57 crc kubenswrapper[4711]: I0123 08:39:57.106247 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f29c800-7e46-463f-8416-46cdb5c1e5c2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4f29c800-7e46-463f-8416-46cdb5c1e5c2" (UID: "4f29c800-7e46-463f-8416-46cdb5c1e5c2"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:39:57 crc kubenswrapper[4711]: I0123 08:39:57.110356 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f29c800-7e46-463f-8416-46cdb5c1e5c2-kube-api-access-5jblt" (OuterVolumeSpecName: "kube-api-access-5jblt") pod "4f29c800-7e46-463f-8416-46cdb5c1e5c2" (UID: "4f29c800-7e46-463f-8416-46cdb5c1e5c2"). InnerVolumeSpecName "kube-api-access-5jblt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:39:57 crc kubenswrapper[4711]: I0123 08:39:57.207467 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f29c800-7e46-463f-8416-46cdb5c1e5c2-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:39:57 crc kubenswrapper[4711]: I0123 08:39:57.207572 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jblt\" (UniqueName: \"kubernetes.io/projected/4f29c800-7e46-463f-8416-46cdb5c1e5c2-kube-api-access-5jblt\") on node \"crc\" DevicePath \"\"" Jan 23 08:39:57 crc kubenswrapper[4711]: I0123 08:39:57.697339 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-xqwbw" event={"ID":"4f29c800-7e46-463f-8416-46cdb5c1e5c2","Type":"ContainerDied","Data":"6612246ac61096a308d586c18ff4620b6c005aeae1565e1b37748b1b225c3cba"} Jan 23 08:39:57 crc kubenswrapper[4711]: I0123 08:39:57.697390 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6612246ac61096a308d586c18ff4620b6c005aeae1565e1b37748b1b225c3cba" Jan 23 08:39:57 crc kubenswrapper[4711]: I0123 08:39:57.697466 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-xqwbw" Jan 23 08:39:58 crc kubenswrapper[4711]: I0123 08:39:58.104400 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/root-account-create-update-xqwbw"] Jan 23 08:39:58 crc kubenswrapper[4711]: I0123 08:39:58.111428 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/root-account-create-update-xqwbw"] Jan 23 08:39:59 crc kubenswrapper[4711]: I0123 08:39:59.486699 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f29c800-7e46-463f-8416-46cdb5c1e5c2" path="/var/lib/kubelet/pods/4f29c800-7e46-463f-8416-46cdb5c1e5c2/volumes" Jan 23 08:40:01 crc kubenswrapper[4711]: I0123 08:40:01.587652 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/root-account-create-update-9g27p"] Jan 23 08:40:01 crc kubenswrapper[4711]: E0123 08:40:01.588090 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f29c800-7e46-463f-8416-46cdb5c1e5c2" containerName="mariadb-account-create-update" Jan 23 08:40:01 crc kubenswrapper[4711]: I0123 08:40:01.588112 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f29c800-7e46-463f-8416-46cdb5c1e5c2" containerName="mariadb-account-create-update" Jan 23 08:40:01 crc kubenswrapper[4711]: I0123 08:40:01.588411 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f29c800-7e46-463f-8416-46cdb5c1e5c2" containerName="mariadb-account-create-update" Jan 23 08:40:01 crc kubenswrapper[4711]: I0123 08:40:01.589221 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/root-account-create-update-9g27p" Jan 23 08:40:01 crc kubenswrapper[4711]: I0123 08:40:01.591396 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"openstack-mariadb-root-db-secret" Jan 23 08:40:01 crc kubenswrapper[4711]: I0123 08:40:01.604484 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/root-account-create-update-9g27p"] Jan 23 08:40:01 crc kubenswrapper[4711]: I0123 08:40:01.682404 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d7359701-539c-40f8-894f-0086ba8c9706-operator-scripts\") pod \"root-account-create-update-9g27p\" (UID: \"d7359701-539c-40f8-894f-0086ba8c9706\") " pod="nova-kuttl-default/root-account-create-update-9g27p" Jan 23 08:40:01 crc kubenswrapper[4711]: I0123 08:40:01.682528 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hs4x\" (UniqueName: \"kubernetes.io/projected/d7359701-539c-40f8-894f-0086ba8c9706-kube-api-access-8hs4x\") pod \"root-account-create-update-9g27p\" (UID: \"d7359701-539c-40f8-894f-0086ba8c9706\") " pod="nova-kuttl-default/root-account-create-update-9g27p" Jan 23 08:40:01 crc kubenswrapper[4711]: I0123 08:40:01.784358 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hs4x\" (UniqueName: \"kubernetes.io/projected/d7359701-539c-40f8-894f-0086ba8c9706-kube-api-access-8hs4x\") pod \"root-account-create-update-9g27p\" (UID: \"d7359701-539c-40f8-894f-0086ba8c9706\") " pod="nova-kuttl-default/root-account-create-update-9g27p" Jan 23 08:40:01 crc kubenswrapper[4711]: I0123 08:40:01.784945 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d7359701-539c-40f8-894f-0086ba8c9706-operator-scripts\") pod \"root-account-create-update-9g27p\" (UID: \"d7359701-539c-40f8-894f-0086ba8c9706\") " pod="nova-kuttl-default/root-account-create-update-9g27p" Jan 23 08:40:01 crc kubenswrapper[4711]: I0123 08:40:01.785926 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d7359701-539c-40f8-894f-0086ba8c9706-operator-scripts\") pod \"root-account-create-update-9g27p\" (UID: \"d7359701-539c-40f8-894f-0086ba8c9706\") " pod="nova-kuttl-default/root-account-create-update-9g27p" Jan 23 08:40:01 crc kubenswrapper[4711]: I0123 08:40:01.808823 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hs4x\" (UniqueName: \"kubernetes.io/projected/d7359701-539c-40f8-894f-0086ba8c9706-kube-api-access-8hs4x\") pod \"root-account-create-update-9g27p\" (UID: \"d7359701-539c-40f8-894f-0086ba8c9706\") " pod="nova-kuttl-default/root-account-create-update-9g27p" Jan 23 08:40:01 crc kubenswrapper[4711]: I0123 08:40:01.920386 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/root-account-create-update-9g27p" Jan 23 08:40:02 crc kubenswrapper[4711]: I0123 08:40:02.401633 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/root-account-create-update-9g27p"] Jan 23 08:40:02 crc kubenswrapper[4711]: I0123 08:40:02.735780 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-9g27p" event={"ID":"d7359701-539c-40f8-894f-0086ba8c9706","Type":"ContainerStarted","Data":"93dd9d88330b654bdc32bcd7243b9c25ebc5f600677680975b5dd98a111a4a32"} Jan 23 08:40:03 crc kubenswrapper[4711]: I0123 08:40:03.746694 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-9g27p" event={"ID":"d7359701-539c-40f8-894f-0086ba8c9706","Type":"ContainerStarted","Data":"18adc39da558e753913106afe00dcc3908d8edfd5ff3dfaa5b75d17bf4af4e13"} Jan 23 08:40:03 crc kubenswrapper[4711]: I0123 08:40:03.766778 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/root-account-create-update-9g27p" podStartSLOduration=2.766757762 podStartE2EDuration="2.766757762s" podCreationTimestamp="2026-01-23 08:40:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:40:03.764878966 +0000 UTC m=+1189.337835364" watchObservedRunningTime="2026-01-23 08:40:03.766757762 +0000 UTC m=+1189.339714130" Jan 23 08:40:04 crc kubenswrapper[4711]: I0123 08:40:04.756323 4711 generic.go:334] "Generic (PLEG): container finished" podID="d7359701-539c-40f8-894f-0086ba8c9706" containerID="18adc39da558e753913106afe00dcc3908d8edfd5ff3dfaa5b75d17bf4af4e13" exitCode=0 Jan 23 08:40:04 crc kubenswrapper[4711]: I0123 08:40:04.756378 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-9g27p" event={"ID":"d7359701-539c-40f8-894f-0086ba8c9706","Type":"ContainerDied","Data":"18adc39da558e753913106afe00dcc3908d8edfd5ff3dfaa5b75d17bf4af4e13"} Jan 23 08:40:06 crc kubenswrapper[4711]: I0123 08:40:06.045188 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-9g27p" Jan 23 08:40:06 crc kubenswrapper[4711]: I0123 08:40:06.095271 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d7359701-539c-40f8-894f-0086ba8c9706-operator-scripts\") pod \"d7359701-539c-40f8-894f-0086ba8c9706\" (UID: \"d7359701-539c-40f8-894f-0086ba8c9706\") " Jan 23 08:40:06 crc kubenswrapper[4711]: I0123 08:40:06.095332 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hs4x\" (UniqueName: \"kubernetes.io/projected/d7359701-539c-40f8-894f-0086ba8c9706-kube-api-access-8hs4x\") pod \"d7359701-539c-40f8-894f-0086ba8c9706\" (UID: \"d7359701-539c-40f8-894f-0086ba8c9706\") " Jan 23 08:40:06 crc kubenswrapper[4711]: I0123 08:40:06.096184 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7359701-539c-40f8-894f-0086ba8c9706-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d7359701-539c-40f8-894f-0086ba8c9706" (UID: "d7359701-539c-40f8-894f-0086ba8c9706"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:40:06 crc kubenswrapper[4711]: I0123 08:40:06.101370 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7359701-539c-40f8-894f-0086ba8c9706-kube-api-access-8hs4x" (OuterVolumeSpecName: "kube-api-access-8hs4x") pod "d7359701-539c-40f8-894f-0086ba8c9706" (UID: "d7359701-539c-40f8-894f-0086ba8c9706"). InnerVolumeSpecName "kube-api-access-8hs4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:40:06 crc kubenswrapper[4711]: I0123 08:40:06.197560 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d7359701-539c-40f8-894f-0086ba8c9706-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:40:06 crc kubenswrapper[4711]: I0123 08:40:06.197609 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hs4x\" (UniqueName: \"kubernetes.io/projected/d7359701-539c-40f8-894f-0086ba8c9706-kube-api-access-8hs4x\") on node \"crc\" DevicePath \"\"" Jan 23 08:40:06 crc kubenswrapper[4711]: I0123 08:40:06.774352 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-9g27p" event={"ID":"d7359701-539c-40f8-894f-0086ba8c9706","Type":"ContainerDied","Data":"93dd9d88330b654bdc32bcd7243b9c25ebc5f600677680975b5dd98a111a4a32"} Jan 23 08:40:06 crc kubenswrapper[4711]: I0123 08:40:06.774407 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93dd9d88330b654bdc32bcd7243b9c25ebc5f600677680975b5dd98a111a4a32" Jan 23 08:40:06 crc kubenswrapper[4711]: I0123 08:40:06.774405 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-9g27p" Jan 23 08:40:08 crc kubenswrapper[4711]: I0123 08:40:08.141543 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/root-account-create-update-9g27p"] Jan 23 08:40:08 crc kubenswrapper[4711]: I0123 08:40:08.151241 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/root-account-create-update-9g27p"] Jan 23 08:40:08 crc kubenswrapper[4711]: I0123 08:40:08.789930 4711 generic.go:334] "Generic (PLEG): container finished" podID="f970c1db-48d5-4b49-afc1-eee7e1289da9" containerID="eb6682a48c60e2b6d9a2e6be3d59b5fc1082f4615530c40f87cd6b782a639fd7" exitCode=0 Jan 23 08:40:08 crc kubenswrapper[4711]: I0123 08:40:08.790007 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-server-0" event={"ID":"f970c1db-48d5-4b49-afc1-eee7e1289da9","Type":"ContainerDied","Data":"eb6682a48c60e2b6d9a2e6be3d59b5fc1082f4615530c40f87cd6b782a639fd7"} Jan 23 08:40:08 crc kubenswrapper[4711]: I0123 08:40:08.793434 4711 generic.go:334] "Generic (PLEG): container finished" podID="b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf" containerID="7afc35d31f32a8f230f06c62d2a4b358950b7b4c403215810e3f8d528f90df85" exitCode=0 Jan 23 08:40:08 crc kubenswrapper[4711]: I0123 08:40:08.793482 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" event={"ID":"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf","Type":"ContainerDied","Data":"7afc35d31f32a8f230f06c62d2a4b358950b7b4c403215810e3f8d528f90df85"} Jan 23 08:40:09 crc kubenswrapper[4711]: I0123 08:40:09.484312 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7359701-539c-40f8-894f-0086ba8c9706" 
path="/var/lib/kubelet/pods/d7359701-539c-40f8-894f-0086ba8c9706/volumes" Jan 23 08:40:09 crc kubenswrapper[4711]: I0123 08:40:09.803169 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-server-0" event={"ID":"f970c1db-48d5-4b49-afc1-eee7e1289da9","Type":"ContainerStarted","Data":"b525c3fc3f7aac6941aefdf0a3db15ea49c69c77e9b57610721da43b0e2bbe74"} Jan 23 08:40:09 crc kubenswrapper[4711]: I0123 08:40:09.803440 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/rabbitmq-server-0" Jan 23 08:40:09 crc kubenswrapper[4711]: I0123 08:40:09.805955 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" event={"ID":"b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf","Type":"ContainerStarted","Data":"7cbf2ece407839b11a32c5bf2f7bc9a2a790f764488530702957f28c32a8cefe"} Jan 23 08:40:09 crc kubenswrapper[4711]: I0123 08:40:09.806433 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:40:09 crc kubenswrapper[4711]: I0123 08:40:09.808576 4711 generic.go:334] "Generic (PLEG): container finished" podID="00e53f46-c48c-4f2c-83aa-088781b82d46" containerID="edc7556801c37d653be8fa24490d02038dff85bb1351aa805c115fa4c1f52570" exitCode=0 Jan 23 08:40:09 crc kubenswrapper[4711]: I0123 08:40:09.808634 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-cell1-server-0" event={"ID":"00e53f46-c48c-4f2c-83aa-088781b82d46","Type":"ContainerDied","Data":"edc7556801c37d653be8fa24490d02038dff85bb1351aa805c115fa4c1f52570"} Jan 23 08:40:09 crc kubenswrapper[4711]: I0123 08:40:09.873008 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" podStartSLOduration=-9223371966.981783 podStartE2EDuration="1m9.872993762s" podCreationTimestamp="2026-01-23 08:39:00 +0000 UTC" firstStartedPulling="2026-01-23 08:39:03.341842166 +0000 UTC m=+1128.914798534" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:40:09.871439784 +0000 UTC m=+1195.444396152" watchObservedRunningTime="2026-01-23 08:40:09.872993762 +0000 UTC m=+1195.445950130" Jan 23 08:40:09 crc kubenswrapper[4711]: I0123 08:40:09.876993 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/rabbitmq-server-0" podStartSLOduration=37.778921745 podStartE2EDuration="1m9.87698365s" podCreationTimestamp="2026-01-23 08:39:00 +0000 UTC" firstStartedPulling="2026-01-23 08:39:03.246405973 +0000 UTC m=+1128.819362341" lastFinishedPulling="2026-01-23 08:39:35.344467878 +0000 UTC m=+1160.917424246" observedRunningTime="2026-01-23 08:40:09.835218988 +0000 UTC m=+1195.408175376" watchObservedRunningTime="2026-01-23 08:40:09.87698365 +0000 UTC m=+1195.449940018" Jan 23 08:40:10 crc kubenswrapper[4711]: I0123 08:40:10.817570 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-cell1-server-0" event={"ID":"00e53f46-c48c-4f2c-83aa-088781b82d46","Type":"ContainerStarted","Data":"88e3b19c086d7be27e6e90b687f8f14be90ede4a50aa616f6f410e45acfccd52"} Jan 23 08:40:11 crc kubenswrapper[4711]: I0123 08:40:11.609684 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/root-account-create-update-8r6n2"] Jan 23 08:40:11 crc kubenswrapper[4711]: E0123 08:40:11.610151 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7359701-539c-40f8-894f-0086ba8c9706" 
containerName="mariadb-account-create-update" Jan 23 08:40:11 crc kubenswrapper[4711]: I0123 08:40:11.610165 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7359701-539c-40f8-894f-0086ba8c9706" containerName="mariadb-account-create-update" Jan 23 08:40:11 crc kubenswrapper[4711]: I0123 08:40:11.610329 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7359701-539c-40f8-894f-0086ba8c9706" containerName="mariadb-account-create-update" Jan 23 08:40:11 crc kubenswrapper[4711]: I0123 08:40:11.610990 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-8r6n2" Jan 23 08:40:11 crc kubenswrapper[4711]: I0123 08:40:11.613155 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"openstack-mariadb-root-db-secret" Jan 23 08:40:11 crc kubenswrapper[4711]: I0123 08:40:11.631161 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/root-account-create-update-8r6n2"] Jan 23 08:40:11 crc kubenswrapper[4711]: I0123 08:40:11.707476 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8t5l\" (UniqueName: \"kubernetes.io/projected/ee520706-43b4-4093-9913-670a11dd0c55-kube-api-access-g8t5l\") pod \"root-account-create-update-8r6n2\" (UID: \"ee520706-43b4-4093-9913-670a11dd0c55\") " pod="nova-kuttl-default/root-account-create-update-8r6n2" Jan 23 08:40:11 crc kubenswrapper[4711]: I0123 08:40:11.707649 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ee520706-43b4-4093-9913-670a11dd0c55-operator-scripts\") pod \"root-account-create-update-8r6n2\" (UID: \"ee520706-43b4-4093-9913-670a11dd0c55\") " pod="nova-kuttl-default/root-account-create-update-8r6n2" Jan 23 08:40:11 crc kubenswrapper[4711]: I0123 08:40:11.808757 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8t5l\" (UniqueName: \"kubernetes.io/projected/ee520706-43b4-4093-9913-670a11dd0c55-kube-api-access-g8t5l\") pod \"root-account-create-update-8r6n2\" (UID: \"ee520706-43b4-4093-9913-670a11dd0c55\") " pod="nova-kuttl-default/root-account-create-update-8r6n2" Jan 23 08:40:11 crc kubenswrapper[4711]: I0123 08:40:11.808892 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ee520706-43b4-4093-9913-670a11dd0c55-operator-scripts\") pod \"root-account-create-update-8r6n2\" (UID: \"ee520706-43b4-4093-9913-670a11dd0c55\") " pod="nova-kuttl-default/root-account-create-update-8r6n2" Jan 23 08:40:11 crc kubenswrapper[4711]: I0123 08:40:11.809828 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ee520706-43b4-4093-9913-670a11dd0c55-operator-scripts\") pod \"root-account-create-update-8r6n2\" (UID: \"ee520706-43b4-4093-9913-670a11dd0c55\") " pod="nova-kuttl-default/root-account-create-update-8r6n2" Jan 23 08:40:11 crc kubenswrapper[4711]: I0123 08:40:11.824299 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:40:11 crc kubenswrapper[4711]: I0123 08:40:11.830706 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8t5l\" (UniqueName: 
\"kubernetes.io/projected/ee520706-43b4-4093-9913-670a11dd0c55-kube-api-access-g8t5l\") pod \"root-account-create-update-8r6n2\" (UID: \"ee520706-43b4-4093-9913-670a11dd0c55\") " pod="nova-kuttl-default/root-account-create-update-8r6n2" Jan 23 08:40:11 crc kubenswrapper[4711]: I0123 08:40:11.852850 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/rabbitmq-cell1-server-0" podStartSLOduration=-9223371966.001944 podStartE2EDuration="1m10.852832293s" podCreationTimestamp="2026-01-23 08:39:01 +0000 UTC" firstStartedPulling="2026-01-23 08:39:03.480301031 +0000 UTC m=+1129.053257399" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:40:11.845844093 +0000 UTC m=+1197.418800461" watchObservedRunningTime="2026-01-23 08:40:11.852832293 +0000 UTC m=+1197.425788661" Jan 23 08:40:11 crc kubenswrapper[4711]: I0123 08:40:11.969831 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-8r6n2" Jan 23 08:40:12 crc kubenswrapper[4711]: I0123 08:40:12.512919 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/root-account-create-update-8r6n2"] Jan 23 08:40:12 crc kubenswrapper[4711]: I0123 08:40:12.832201 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-8r6n2" event={"ID":"ee520706-43b4-4093-9913-670a11dd0c55","Type":"ContainerStarted","Data":"e42547e8adbef23aeb04aa8863314e680316841c1b25d88926284f703acba8d1"} Jan 23 08:40:12 crc kubenswrapper[4711]: I0123 08:40:12.832263 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-8r6n2" event={"ID":"ee520706-43b4-4093-9913-670a11dd0c55","Type":"ContainerStarted","Data":"2c970da3a7566c91871b3294548f2f4adea581598b8dedaed68d707def0fd753"} Jan 23 08:40:12 crc kubenswrapper[4711]: I0123 08:40:12.853167 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/root-account-create-update-8r6n2" podStartSLOduration=1.853145149 podStartE2EDuration="1.853145149s" podCreationTimestamp="2026-01-23 08:40:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:40:12.845493651 +0000 UTC m=+1198.418450019" watchObservedRunningTime="2026-01-23 08:40:12.853145149 +0000 UTC m=+1198.426101517" Jan 23 08:40:13 crc kubenswrapper[4711]: I0123 08:40:13.841210 4711 generic.go:334] "Generic (PLEG): container finished" podID="ee520706-43b4-4093-9913-670a11dd0c55" containerID="e42547e8adbef23aeb04aa8863314e680316841c1b25d88926284f703acba8d1" exitCode=0 Jan 23 08:40:13 crc kubenswrapper[4711]: I0123 08:40:13.841314 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-8r6n2" event={"ID":"ee520706-43b4-4093-9913-670a11dd0c55","Type":"ContainerDied","Data":"e42547e8adbef23aeb04aa8863314e680316841c1b25d88926284f703acba8d1"} Jan 23 08:40:15 crc kubenswrapper[4711]: I0123 08:40:15.136837 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/root-account-create-update-8r6n2" Jan 23 08:40:15 crc kubenswrapper[4711]: I0123 08:40:15.287398 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ee520706-43b4-4093-9913-670a11dd0c55-operator-scripts\") pod \"ee520706-43b4-4093-9913-670a11dd0c55\" (UID: \"ee520706-43b4-4093-9913-670a11dd0c55\") " Jan 23 08:40:15 crc kubenswrapper[4711]: I0123 08:40:15.287566 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8t5l\" (UniqueName: \"kubernetes.io/projected/ee520706-43b4-4093-9913-670a11dd0c55-kube-api-access-g8t5l\") pod \"ee520706-43b4-4093-9913-670a11dd0c55\" (UID: \"ee520706-43b4-4093-9913-670a11dd0c55\") " Jan 23 08:40:15 crc kubenswrapper[4711]: I0123 08:40:15.288352 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee520706-43b4-4093-9913-670a11dd0c55-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ee520706-43b4-4093-9913-670a11dd0c55" (UID: "ee520706-43b4-4093-9913-670a11dd0c55"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:40:15 crc kubenswrapper[4711]: I0123 08:40:15.305771 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee520706-43b4-4093-9913-670a11dd0c55-kube-api-access-g8t5l" (OuterVolumeSpecName: "kube-api-access-g8t5l") pod "ee520706-43b4-4093-9913-670a11dd0c55" (UID: "ee520706-43b4-4093-9913-670a11dd0c55"). InnerVolumeSpecName "kube-api-access-g8t5l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:40:15 crc kubenswrapper[4711]: I0123 08:40:15.389990 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ee520706-43b4-4093-9913-670a11dd0c55-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:40:15 crc kubenswrapper[4711]: I0123 08:40:15.390023 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8t5l\" (UniqueName: \"kubernetes.io/projected/ee520706-43b4-4093-9913-670a11dd0c55-kube-api-access-g8t5l\") on node \"crc\" DevicePath \"\"" Jan 23 08:40:15 crc kubenswrapper[4711]: I0123 08:40:15.861664 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-8r6n2" event={"ID":"ee520706-43b4-4093-9913-670a11dd0c55","Type":"ContainerDied","Data":"2c970da3a7566c91871b3294548f2f4adea581598b8dedaed68d707def0fd753"} Jan 23 08:40:15 crc kubenswrapper[4711]: I0123 08:40:15.861746 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c970da3a7566c91871b3294548f2f4adea581598b8dedaed68d707def0fd753" Jan 23 08:40:15 crc kubenswrapper[4711]: I0123 08:40:15.861745 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/root-account-create-update-8r6n2" Jan 23 08:40:18 crc kubenswrapper[4711]: I0123 08:40:18.143366 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/root-account-create-update-8r6n2"] Jan 23 08:40:18 crc kubenswrapper[4711]: I0123 08:40:18.148742 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/root-account-create-update-8r6n2"] Jan 23 08:40:19 crc kubenswrapper[4711]: I0123 08:40:19.484642 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee520706-43b4-4093-9913-670a11dd0c55" path="/var/lib/kubelet/pods/ee520706-43b4-4093-9913-670a11dd0c55/volumes" Jan 23 08:40:22 crc kubenswrapper[4711]: I0123 08:40:22.222554 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/rabbitmq-server-0" Jan 23 08:40:22 crc kubenswrapper[4711]: I0123 08:40:22.253476 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 23 08:40:22 crc kubenswrapper[4711]: I0123 08:40:22.732731 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 23 08:40:22 crc kubenswrapper[4711]: I0123 08:40:22.822331 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/keystone-db-sync-57tdw"] Jan 23 08:40:22 crc kubenswrapper[4711]: E0123 08:40:22.822653 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee520706-43b4-4093-9913-670a11dd0c55" containerName="mariadb-account-create-update" Jan 23 08:40:22 crc kubenswrapper[4711]: I0123 08:40:22.822670 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee520706-43b4-4093-9913-670a11dd0c55" containerName="mariadb-account-create-update" Jan 23 08:40:22 crc kubenswrapper[4711]: I0123 08:40:22.822805 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee520706-43b4-4093-9913-670a11dd0c55" containerName="mariadb-account-create-update" Jan 23 08:40:22 crc kubenswrapper[4711]: I0123 08:40:22.823319 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-db-sync-57tdw" Jan 23 08:40:22 crc kubenswrapper[4711]: I0123 08:40:22.826977 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-scripts" Jan 23 08:40:22 crc kubenswrapper[4711]: I0123 08:40:22.827132 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone" Jan 23 08:40:22 crc kubenswrapper[4711]: I0123 08:40:22.827074 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-keystone-dockercfg-9rrpp" Jan 23 08:40:22 crc kubenswrapper[4711]: I0123 08:40:22.829202 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-config-data" Jan 23 08:40:22 crc kubenswrapper[4711]: I0123 08:40:22.838365 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-db-sync-57tdw"] Jan 23 08:40:22 crc kubenswrapper[4711]: I0123 08:40:22.930616 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snc98\" (UniqueName: \"kubernetes.io/projected/3a889480-e905-406c-a80d-a01ddebb3a4a-kube-api-access-snc98\") pod \"keystone-db-sync-57tdw\" (UID: \"3a889480-e905-406c-a80d-a01ddebb3a4a\") " pod="nova-kuttl-default/keystone-db-sync-57tdw" Jan 23 08:40:22 crc kubenswrapper[4711]: I0123 08:40:22.930799 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a889480-e905-406c-a80d-a01ddebb3a4a-config-data\") pod \"keystone-db-sync-57tdw\" (UID: \"3a889480-e905-406c-a80d-a01ddebb3a4a\") " pod="nova-kuttl-default/keystone-db-sync-57tdw" Jan 23 08:40:22 crc kubenswrapper[4711]: I0123 08:40:22.930834 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a889480-e905-406c-a80d-a01ddebb3a4a-combined-ca-bundle\") pod \"keystone-db-sync-57tdw\" (UID: \"3a889480-e905-406c-a80d-a01ddebb3a4a\") " pod="nova-kuttl-default/keystone-db-sync-57tdw" Jan 23 08:40:23 crc kubenswrapper[4711]: I0123 08:40:23.032732 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a889480-e905-406c-a80d-a01ddebb3a4a-config-data\") pod \"keystone-db-sync-57tdw\" (UID: \"3a889480-e905-406c-a80d-a01ddebb3a4a\") " pod="nova-kuttl-default/keystone-db-sync-57tdw" Jan 23 08:40:23 crc kubenswrapper[4711]: I0123 08:40:23.033569 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a889480-e905-406c-a80d-a01ddebb3a4a-combined-ca-bundle\") pod \"keystone-db-sync-57tdw\" (UID: \"3a889480-e905-406c-a80d-a01ddebb3a4a\") " pod="nova-kuttl-default/keystone-db-sync-57tdw" Jan 23 08:40:23 crc kubenswrapper[4711]: I0123 08:40:23.033658 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snc98\" (UniqueName: \"kubernetes.io/projected/3a889480-e905-406c-a80d-a01ddebb3a4a-kube-api-access-snc98\") pod \"keystone-db-sync-57tdw\" (UID: \"3a889480-e905-406c-a80d-a01ddebb3a4a\") " pod="nova-kuttl-default/keystone-db-sync-57tdw" Jan 23 08:40:23 crc kubenswrapper[4711]: I0123 08:40:23.040029 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3a889480-e905-406c-a80d-a01ddebb3a4a-combined-ca-bundle\") pod \"keystone-db-sync-57tdw\" (UID: \"3a889480-e905-406c-a80d-a01ddebb3a4a\") " pod="nova-kuttl-default/keystone-db-sync-57tdw" Jan 23 08:40:23 crc kubenswrapper[4711]: I0123 08:40:23.040184 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a889480-e905-406c-a80d-a01ddebb3a4a-config-data\") pod \"keystone-db-sync-57tdw\" (UID: \"3a889480-e905-406c-a80d-a01ddebb3a4a\") " pod="nova-kuttl-default/keystone-db-sync-57tdw" Jan 23 08:40:23 crc kubenswrapper[4711]: I0123 08:40:23.051615 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snc98\" (UniqueName: \"kubernetes.io/projected/3a889480-e905-406c-a80d-a01ddebb3a4a-kube-api-access-snc98\") pod \"keystone-db-sync-57tdw\" (UID: \"3a889480-e905-406c-a80d-a01ddebb3a4a\") " pod="nova-kuttl-default/keystone-db-sync-57tdw" Jan 23 08:40:23 crc kubenswrapper[4711]: I0123 08:40:23.142329 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-db-sync-57tdw" Jan 23 08:40:23 crc kubenswrapper[4711]: I0123 08:40:23.260334 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/root-account-create-update-ckwk4"] Jan 23 08:40:23 crc kubenswrapper[4711]: I0123 08:40:23.261788 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-ckwk4" Jan 23 08:40:23 crc kubenswrapper[4711]: I0123 08:40:23.264759 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"openstack-cell1-mariadb-root-db-secret" Jan 23 08:40:23 crc kubenswrapper[4711]: I0123 08:40:23.268056 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/root-account-create-update-ckwk4"] Jan 23 08:40:23 crc kubenswrapper[4711]: I0123 08:40:23.392747 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-db-sync-57tdw"] Jan 23 08:40:23 crc kubenswrapper[4711]: I0123 08:40:23.439675 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbx22\" (UniqueName: \"kubernetes.io/projected/aed12545-6ee6-4109-ba42-d89dc6b7939a-kube-api-access-sbx22\") pod \"root-account-create-update-ckwk4\" (UID: \"aed12545-6ee6-4109-ba42-d89dc6b7939a\") " pod="nova-kuttl-default/root-account-create-update-ckwk4" Jan 23 08:40:23 crc kubenswrapper[4711]: I0123 08:40:23.439997 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aed12545-6ee6-4109-ba42-d89dc6b7939a-operator-scripts\") pod \"root-account-create-update-ckwk4\" (UID: \"aed12545-6ee6-4109-ba42-d89dc6b7939a\") " pod="nova-kuttl-default/root-account-create-update-ckwk4" Jan 23 08:40:23 crc kubenswrapper[4711]: I0123 08:40:23.541983 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbx22\" (UniqueName: \"kubernetes.io/projected/aed12545-6ee6-4109-ba42-d89dc6b7939a-kube-api-access-sbx22\") pod \"root-account-create-update-ckwk4\" (UID: \"aed12545-6ee6-4109-ba42-d89dc6b7939a\") " pod="nova-kuttl-default/root-account-create-update-ckwk4" Jan 23 08:40:23 crc kubenswrapper[4711]: I0123 08:40:23.542408 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/aed12545-6ee6-4109-ba42-d89dc6b7939a-operator-scripts\") pod \"root-account-create-update-ckwk4\" (UID: \"aed12545-6ee6-4109-ba42-d89dc6b7939a\") " pod="nova-kuttl-default/root-account-create-update-ckwk4" Jan 23 08:40:23 crc kubenswrapper[4711]: I0123 08:40:23.543412 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aed12545-6ee6-4109-ba42-d89dc6b7939a-operator-scripts\") pod \"root-account-create-update-ckwk4\" (UID: \"aed12545-6ee6-4109-ba42-d89dc6b7939a\") " pod="nova-kuttl-default/root-account-create-update-ckwk4" Jan 23 08:40:23 crc kubenswrapper[4711]: I0123 08:40:23.561581 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbx22\" (UniqueName: \"kubernetes.io/projected/aed12545-6ee6-4109-ba42-d89dc6b7939a-kube-api-access-sbx22\") pod \"root-account-create-update-ckwk4\" (UID: \"aed12545-6ee6-4109-ba42-d89dc6b7939a\") " pod="nova-kuttl-default/root-account-create-update-ckwk4" Jan 23 08:40:23 crc kubenswrapper[4711]: I0123 08:40:23.588359 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-ckwk4" Jan 23 08:40:23 crc kubenswrapper[4711]: I0123 08:40:23.915942 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-sync-57tdw" event={"ID":"3a889480-e905-406c-a80d-a01ddebb3a4a","Type":"ContainerStarted","Data":"858c5bd9cd150ca315146657822766b8ec5c769e3a497d79d39469e871af31d4"} Jan 23 08:40:24 crc kubenswrapper[4711]: I0123 08:40:24.094647 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/root-account-create-update-ckwk4"] Jan 23 08:40:24 crc kubenswrapper[4711]: I0123 08:40:24.923857 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-ckwk4" event={"ID":"aed12545-6ee6-4109-ba42-d89dc6b7939a","Type":"ContainerStarted","Data":"d5bfc1736b43e40ffc0ebf7ef3bc12639d0d39e317a76e8002c24ef68a3ad688"} Jan 23 08:40:25 crc kubenswrapper[4711]: I0123 08:40:25.933396 4711 generic.go:334] "Generic (PLEG): container finished" podID="aed12545-6ee6-4109-ba42-d89dc6b7939a" containerID="d459bb977e47533e5fad502adff1260883892a96c3f4c7c9f2af17c793d0f2ff" exitCode=0 Jan 23 08:40:25 crc kubenswrapper[4711]: I0123 08:40:25.933472 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-ckwk4" event={"ID":"aed12545-6ee6-4109-ba42-d89dc6b7939a","Type":"ContainerDied","Data":"d459bb977e47533e5fad502adff1260883892a96c3f4c7c9f2af17c793d0f2ff"} Jan 23 08:40:25 crc kubenswrapper[4711]: I0123 08:40:25.993150 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:40:25 crc kubenswrapper[4711]: I0123 08:40:25.993207 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:40:29 crc kubenswrapper[4711]: I0123 08:40:29.712980 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/root-account-create-update-ckwk4" Jan 23 08:40:29 crc kubenswrapper[4711]: I0123 08:40:29.840264 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aed12545-6ee6-4109-ba42-d89dc6b7939a-operator-scripts\") pod \"aed12545-6ee6-4109-ba42-d89dc6b7939a\" (UID: \"aed12545-6ee6-4109-ba42-d89dc6b7939a\") " Jan 23 08:40:29 crc kubenswrapper[4711]: I0123 08:40:29.840638 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sbx22\" (UniqueName: \"kubernetes.io/projected/aed12545-6ee6-4109-ba42-d89dc6b7939a-kube-api-access-sbx22\") pod \"aed12545-6ee6-4109-ba42-d89dc6b7939a\" (UID: \"aed12545-6ee6-4109-ba42-d89dc6b7939a\") " Jan 23 08:40:29 crc kubenswrapper[4711]: I0123 08:40:29.841188 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aed12545-6ee6-4109-ba42-d89dc6b7939a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "aed12545-6ee6-4109-ba42-d89dc6b7939a" (UID: "aed12545-6ee6-4109-ba42-d89dc6b7939a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:40:29 crc kubenswrapper[4711]: I0123 08:40:29.852259 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aed12545-6ee6-4109-ba42-d89dc6b7939a-kube-api-access-sbx22" (OuterVolumeSpecName: "kube-api-access-sbx22") pod "aed12545-6ee6-4109-ba42-d89dc6b7939a" (UID: "aed12545-6ee6-4109-ba42-d89dc6b7939a"). InnerVolumeSpecName "kube-api-access-sbx22". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:40:29 crc kubenswrapper[4711]: I0123 08:40:29.942124 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aed12545-6ee6-4109-ba42-d89dc6b7939a-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:40:29 crc kubenswrapper[4711]: I0123 08:40:29.942170 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sbx22\" (UniqueName: \"kubernetes.io/projected/aed12545-6ee6-4109-ba42-d89dc6b7939a-kube-api-access-sbx22\") on node \"crc\" DevicePath \"\"" Jan 23 08:40:29 crc kubenswrapper[4711]: I0123 08:40:29.963427 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-ckwk4" event={"ID":"aed12545-6ee6-4109-ba42-d89dc6b7939a","Type":"ContainerDied","Data":"d5bfc1736b43e40ffc0ebf7ef3bc12639d0d39e317a76e8002c24ef68a3ad688"} Jan 23 08:40:29 crc kubenswrapper[4711]: I0123 08:40:29.963472 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d5bfc1736b43e40ffc0ebf7ef3bc12639d0d39e317a76e8002c24ef68a3ad688" Jan 23 08:40:29 crc kubenswrapper[4711]: I0123 08:40:29.963477 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/root-account-create-update-ckwk4" Jan 23 08:40:35 crc kubenswrapper[4711]: I0123 08:40:35.007636 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-sync-57tdw" event={"ID":"3a889480-e905-406c-a80d-a01ddebb3a4a","Type":"ContainerStarted","Data":"894aa8f8bd6d4097ffc9342a899c14a16061645d7203fc874fecda0c019af7d3"} Jan 23 08:40:35 crc kubenswrapper[4711]: I0123 08:40:35.026798 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/keystone-db-sync-57tdw" podStartSLOduration=2.464310118 podStartE2EDuration="13.026781581s" podCreationTimestamp="2026-01-23 08:40:22 +0000 UTC" firstStartedPulling="2026-01-23 08:40:23.39593989 +0000 UTC m=+1208.968896258" lastFinishedPulling="2026-01-23 08:40:33.958411343 +0000 UTC m=+1219.531367721" observedRunningTime="2026-01-23 08:40:35.026400921 +0000 UTC m=+1220.599357309" watchObservedRunningTime="2026-01-23 08:40:35.026781581 +0000 UTC m=+1220.599737949" Jan 23 08:40:41 crc kubenswrapper[4711]: I0123 08:40:41.053866 4711 generic.go:334] "Generic (PLEG): container finished" podID="3a889480-e905-406c-a80d-a01ddebb3a4a" containerID="894aa8f8bd6d4097ffc9342a899c14a16061645d7203fc874fecda0c019af7d3" exitCode=0 Jan 23 08:40:41 crc kubenswrapper[4711]: I0123 08:40:41.053969 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-sync-57tdw" event={"ID":"3a889480-e905-406c-a80d-a01ddebb3a4a","Type":"ContainerDied","Data":"894aa8f8bd6d4097ffc9342a899c14a16061645d7203fc874fecda0c019af7d3"} Jan 23 08:40:42 crc kubenswrapper[4711]: I0123 08:40:42.356326 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-db-sync-57tdw" Jan 23 08:40:42 crc kubenswrapper[4711]: I0123 08:40:42.468540 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a889480-e905-406c-a80d-a01ddebb3a4a-combined-ca-bundle\") pod \"3a889480-e905-406c-a80d-a01ddebb3a4a\" (UID: \"3a889480-e905-406c-a80d-a01ddebb3a4a\") " Jan 23 08:40:42 crc kubenswrapper[4711]: I0123 08:40:42.468787 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-snc98\" (UniqueName: \"kubernetes.io/projected/3a889480-e905-406c-a80d-a01ddebb3a4a-kube-api-access-snc98\") pod \"3a889480-e905-406c-a80d-a01ddebb3a4a\" (UID: \"3a889480-e905-406c-a80d-a01ddebb3a4a\") " Jan 23 08:40:42 crc kubenswrapper[4711]: I0123 08:40:42.468820 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a889480-e905-406c-a80d-a01ddebb3a4a-config-data\") pod \"3a889480-e905-406c-a80d-a01ddebb3a4a\" (UID: \"3a889480-e905-406c-a80d-a01ddebb3a4a\") " Jan 23 08:40:42 crc kubenswrapper[4711]: I0123 08:40:42.476106 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a889480-e905-406c-a80d-a01ddebb3a4a-kube-api-access-snc98" (OuterVolumeSpecName: "kube-api-access-snc98") pod "3a889480-e905-406c-a80d-a01ddebb3a4a" (UID: "3a889480-e905-406c-a80d-a01ddebb3a4a"). InnerVolumeSpecName "kube-api-access-snc98". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:40:42 crc kubenswrapper[4711]: I0123 08:40:42.495896 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a889480-e905-406c-a80d-a01ddebb3a4a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3a889480-e905-406c-a80d-a01ddebb3a4a" (UID: "3a889480-e905-406c-a80d-a01ddebb3a4a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:40:42 crc kubenswrapper[4711]: I0123 08:40:42.508252 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a889480-e905-406c-a80d-a01ddebb3a4a-config-data" (OuterVolumeSpecName: "config-data") pod "3a889480-e905-406c-a80d-a01ddebb3a4a" (UID: "3a889480-e905-406c-a80d-a01ddebb3a4a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:40:42 crc kubenswrapper[4711]: I0123 08:40:42.571211 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a889480-e905-406c-a80d-a01ddebb3a4a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 23 08:40:42 crc kubenswrapper[4711]: I0123 08:40:42.571259 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-snc98\" (UniqueName: \"kubernetes.io/projected/3a889480-e905-406c-a80d-a01ddebb3a4a-kube-api-access-snc98\") on node \"crc\" DevicePath \"\"" Jan 23 08:40:42 crc kubenswrapper[4711]: I0123 08:40:42.571280 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a889480-e905-406c-a80d-a01ddebb3a4a-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.072591 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-sync-57tdw" event={"ID":"3a889480-e905-406c-a80d-a01ddebb3a4a","Type":"ContainerDied","Data":"858c5bd9cd150ca315146657822766b8ec5c769e3a497d79d39469e871af31d4"} Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.072639 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="858c5bd9cd150ca315146657822766b8ec5c769e3a497d79d39469e871af31d4" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.072687 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-db-sync-57tdw" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.275022 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/keystone-bootstrap-hfb95"] Jan 23 08:40:43 crc kubenswrapper[4711]: E0123 08:40:43.275428 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aed12545-6ee6-4109-ba42-d89dc6b7939a" containerName="mariadb-account-create-update" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.275447 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="aed12545-6ee6-4109-ba42-d89dc6b7939a" containerName="mariadb-account-create-update" Jan 23 08:40:43 crc kubenswrapper[4711]: E0123 08:40:43.275466 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a889480-e905-406c-a80d-a01ddebb3a4a" containerName="keystone-db-sync" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.275478 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a889480-e905-406c-a80d-a01ddebb3a4a" containerName="keystone-db-sync" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.275696 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a889480-e905-406c-a80d-a01ddebb3a4a" containerName="keystone-db-sync" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.275723 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="aed12545-6ee6-4109-ba42-d89dc6b7939a" containerName="mariadb-account-create-update" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.276388 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-hfb95" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.281900 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-keystone-dockercfg-9rrpp" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.282088 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.283735 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-config-data" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.283886 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-scripts" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.286938 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"osp-secret" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.295410 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-hfb95"] Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.382671 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-combined-ca-bundle\") pod \"keystone-bootstrap-hfb95\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") " pod="nova-kuttl-default/keystone-bootstrap-hfb95" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.382763 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-credential-keys\") pod \"keystone-bootstrap-hfb95\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") " pod="nova-kuttl-default/keystone-bootstrap-hfb95" Jan 23 08:40:43 crc 
kubenswrapper[4711]: I0123 08:40:43.382830 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4scpj\" (UniqueName: \"kubernetes.io/projected/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-kube-api-access-4scpj\") pod \"keystone-bootstrap-hfb95\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") " pod="nova-kuttl-default/keystone-bootstrap-hfb95" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.382859 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-scripts\") pod \"keystone-bootstrap-hfb95\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") " pod="nova-kuttl-default/keystone-bootstrap-hfb95" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.383030 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-fernet-keys\") pod \"keystone-bootstrap-hfb95\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") " pod="nova-kuttl-default/keystone-bootstrap-hfb95" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.383087 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-config-data\") pod \"keystone-bootstrap-hfb95\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") " pod="nova-kuttl-default/keystone-bootstrap-hfb95" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.484683 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-fernet-keys\") pod \"keystone-bootstrap-hfb95\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") " pod="nova-kuttl-default/keystone-bootstrap-hfb95" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.485043 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-config-data\") pod \"keystone-bootstrap-hfb95\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") " pod="nova-kuttl-default/keystone-bootstrap-hfb95" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.485063 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-combined-ca-bundle\") pod \"keystone-bootstrap-hfb95\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") " pod="nova-kuttl-default/keystone-bootstrap-hfb95" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.485099 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-credential-keys\") pod \"keystone-bootstrap-hfb95\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") " pod="nova-kuttl-default/keystone-bootstrap-hfb95" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.485141 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4scpj\" (UniqueName: \"kubernetes.io/projected/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-kube-api-access-4scpj\") pod \"keystone-bootstrap-hfb95\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") " pod="nova-kuttl-default/keystone-bootstrap-hfb95" Jan 23 
08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.485160 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-scripts\") pod \"keystone-bootstrap-hfb95\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") " pod="nova-kuttl-default/keystone-bootstrap-hfb95" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.485477 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/placement-db-sync-qtrk2"] Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.486580 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-db-sync-qtrk2" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.493909 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-scripts\") pod \"keystone-bootstrap-hfb95\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") " pod="nova-kuttl-default/keystone-bootstrap-hfb95" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.493960 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-credential-keys\") pod \"keystone-bootstrap-hfb95\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") " pod="nova-kuttl-default/keystone-bootstrap-hfb95" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.495705 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-combined-ca-bundle\") pod \"keystone-bootstrap-hfb95\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") " pod="nova-kuttl-default/keystone-bootstrap-hfb95" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.498080 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-fernet-keys\") pod \"keystone-bootstrap-hfb95\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") " pod="nova-kuttl-default/keystone-bootstrap-hfb95" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.503253 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-db-sync-qtrk2"] Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.513543 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-config-data\") pod \"keystone-bootstrap-hfb95\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") " pod="nova-kuttl-default/keystone-bootstrap-hfb95" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.520865 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-scripts" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.521074 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-placement-dockercfg-549tr" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.521203 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-config-data" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.526990 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4scpj\" (UniqueName: 
\"kubernetes.io/projected/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-kube-api-access-4scpj\") pod \"keystone-bootstrap-hfb95\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") " pod="nova-kuttl-default/keystone-bootstrap-hfb95" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.586562 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17c1d7d2-2f13-4a96-8b57-ef301be89606-logs\") pod \"placement-db-sync-qtrk2\" (UID: \"17c1d7d2-2f13-4a96-8b57-ef301be89606\") " pod="nova-kuttl-default/placement-db-sync-qtrk2" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.586953 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17c1d7d2-2f13-4a96-8b57-ef301be89606-scripts\") pod \"placement-db-sync-qtrk2\" (UID: \"17c1d7d2-2f13-4a96-8b57-ef301be89606\") " pod="nova-kuttl-default/placement-db-sync-qtrk2" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.587170 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17c1d7d2-2f13-4a96-8b57-ef301be89606-combined-ca-bundle\") pod \"placement-db-sync-qtrk2\" (UID: \"17c1d7d2-2f13-4a96-8b57-ef301be89606\") " pod="nova-kuttl-default/placement-db-sync-qtrk2" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.588318 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mgvd\" (UniqueName: \"kubernetes.io/projected/17c1d7d2-2f13-4a96-8b57-ef301be89606-kube-api-access-9mgvd\") pod \"placement-db-sync-qtrk2\" (UID: \"17c1d7d2-2f13-4a96-8b57-ef301be89606\") " pod="nova-kuttl-default/placement-db-sync-qtrk2" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.588450 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17c1d7d2-2f13-4a96-8b57-ef301be89606-config-data\") pod \"placement-db-sync-qtrk2\" (UID: \"17c1d7d2-2f13-4a96-8b57-ef301be89606\") " pod="nova-kuttl-default/placement-db-sync-qtrk2" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.600209 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-hfb95" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.693945 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17c1d7d2-2f13-4a96-8b57-ef301be89606-logs\") pod \"placement-db-sync-qtrk2\" (UID: \"17c1d7d2-2f13-4a96-8b57-ef301be89606\") " pod="nova-kuttl-default/placement-db-sync-qtrk2" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.694052 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17c1d7d2-2f13-4a96-8b57-ef301be89606-scripts\") pod \"placement-db-sync-qtrk2\" (UID: \"17c1d7d2-2f13-4a96-8b57-ef301be89606\") " pod="nova-kuttl-default/placement-db-sync-qtrk2" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.694124 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17c1d7d2-2f13-4a96-8b57-ef301be89606-combined-ca-bundle\") pod \"placement-db-sync-qtrk2\" (UID: \"17c1d7d2-2f13-4a96-8b57-ef301be89606\") " pod="nova-kuttl-default/placement-db-sync-qtrk2" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.694168 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mgvd\" (UniqueName: \"kubernetes.io/projected/17c1d7d2-2f13-4a96-8b57-ef301be89606-kube-api-access-9mgvd\") pod \"placement-db-sync-qtrk2\" (UID: \"17c1d7d2-2f13-4a96-8b57-ef301be89606\") " pod="nova-kuttl-default/placement-db-sync-qtrk2" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.694199 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17c1d7d2-2f13-4a96-8b57-ef301be89606-config-data\") pod \"placement-db-sync-qtrk2\" (UID: \"17c1d7d2-2f13-4a96-8b57-ef301be89606\") " pod="nova-kuttl-default/placement-db-sync-qtrk2" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.695157 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17c1d7d2-2f13-4a96-8b57-ef301be89606-logs\") pod \"placement-db-sync-qtrk2\" (UID: \"17c1d7d2-2f13-4a96-8b57-ef301be89606\") " pod="nova-kuttl-default/placement-db-sync-qtrk2" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.702605 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17c1d7d2-2f13-4a96-8b57-ef301be89606-scripts\") pod \"placement-db-sync-qtrk2\" (UID: \"17c1d7d2-2f13-4a96-8b57-ef301be89606\") " pod="nova-kuttl-default/placement-db-sync-qtrk2" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.705372 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17c1d7d2-2f13-4a96-8b57-ef301be89606-config-data\") pod \"placement-db-sync-qtrk2\" (UID: \"17c1d7d2-2f13-4a96-8b57-ef301be89606\") " pod="nova-kuttl-default/placement-db-sync-qtrk2" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.717469 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17c1d7d2-2f13-4a96-8b57-ef301be89606-combined-ca-bundle\") pod \"placement-db-sync-qtrk2\" (UID: \"17c1d7d2-2f13-4a96-8b57-ef301be89606\") " pod="nova-kuttl-default/placement-db-sync-qtrk2" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.720734 4711 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mgvd\" (UniqueName: \"kubernetes.io/projected/17c1d7d2-2f13-4a96-8b57-ef301be89606-kube-api-access-9mgvd\") pod \"placement-db-sync-qtrk2\" (UID: \"17c1d7d2-2f13-4a96-8b57-ef301be89606\") " pod="nova-kuttl-default/placement-db-sync-qtrk2" Jan 23 08:40:43 crc kubenswrapper[4711]: I0123 08:40:43.877799 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-db-sync-qtrk2" Jan 23 08:40:44 crc kubenswrapper[4711]: I0123 08:40:44.056073 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-hfb95"] Jan 23 08:40:44 crc kubenswrapper[4711]: I0123 08:40:44.092630 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-hfb95" event={"ID":"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf","Type":"ContainerStarted","Data":"2bfe3a417ecc781589ec83c5e690e03b87582893546ad2c70c99fcbdd8a2f0a4"} Jan 23 08:40:44 crc kubenswrapper[4711]: W0123 08:40:44.154415 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17c1d7d2_2f13_4a96_8b57_ef301be89606.slice/crio-0905f7bdf53976395a11db6285072d45765cf8f9af97160ca546a48176094943 WatchSource:0}: Error finding container 0905f7bdf53976395a11db6285072d45765cf8f9af97160ca546a48176094943: Status 404 returned error can't find the container with id 0905f7bdf53976395a11db6285072d45765cf8f9af97160ca546a48176094943 Jan 23 08:40:44 crc kubenswrapper[4711]: I0123 08:40:44.155805 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-db-sync-qtrk2"] Jan 23 08:40:45 crc kubenswrapper[4711]: I0123 08:40:45.102001 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-sync-qtrk2" event={"ID":"17c1d7d2-2f13-4a96-8b57-ef301be89606","Type":"ContainerStarted","Data":"0905f7bdf53976395a11db6285072d45765cf8f9af97160ca546a48176094943"} Jan 23 08:40:45 crc kubenswrapper[4711]: I0123 08:40:45.103745 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-hfb95" event={"ID":"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf","Type":"ContainerStarted","Data":"a677978ad45e0ced786848ea5f63c6fc496d6cce3270f396b60ee07b19ff10cf"} Jan 23 08:40:45 crc kubenswrapper[4711]: I0123 08:40:45.124927 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/keystone-bootstrap-hfb95" podStartSLOduration=2.124912562 podStartE2EDuration="2.124912562s" podCreationTimestamp="2026-01-23 08:40:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:40:45.123778144 +0000 UTC m=+1230.696734572" watchObservedRunningTime="2026-01-23 08:40:45.124912562 +0000 UTC m=+1230.697868930" Jan 23 08:40:52 crc kubenswrapper[4711]: I0123 08:40:52.163292 4711 generic.go:334] "Generic (PLEG): container finished" podID="dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf" containerID="a677978ad45e0ced786848ea5f63c6fc496d6cce3270f396b60ee07b19ff10cf" exitCode=0 Jan 23 08:40:52 crc kubenswrapper[4711]: I0123 08:40:52.163374 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-hfb95" event={"ID":"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf","Type":"ContainerDied","Data":"a677978ad45e0ced786848ea5f63c6fc496d6cce3270f396b60ee07b19ff10cf"} Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 
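Nearly everything in this section logs at I (info). The exceptions are the E0123 "RemoveStaleState" entries, emitted while admitting a new pod when per-container CPU/memory-manager state from already-deleted pods is still present, and the single W0123 watch-event failure above, where a cgroup watch fired before the runtime had registered the placement-db-sync-qtrk2 sandbox, which then appears normally as ContainerStarted a second later. Both look benign here. A quick severity tally (my own regexp, keyed to the kubenswrapper prefix, not any kubelet facility) makes that kind of triage cheap:

// sevtally.go - count klog severities (the I/W/E/F letter in tokens like
// "I0123 08:40:44.154415") across a kubelet log read from stdin.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

var sev = regexp.MustCompile(`kubenswrapper\[\d+\]: ([IWEF])\d{4} `)

func main() {
	counts := map[string]int{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
	for sc.Scan() {
		if m := sev.FindStringSubmatch(sc.Text()); m != nil {
			counts[m[1]]++
		}
	}
	for _, s := range []string{"I", "W", "E", "F"} {
		fmt.Printf("%s %d\n", s, counts[s])
	}
}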
Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 08:40:53.173048 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-sync-qtrk2" event={"ID":"17c1d7d2-2f13-4a96-8b57-ef301be89606","Type":"ContainerStarted","Data":"2857200d70b376c98dcc2c645688d4a72ff3a2fb1fb904b1ed49f2fed40b4b89"}
Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 08:40:53.229600 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/placement-db-sync-qtrk2" podStartSLOduration=2.405149757 podStartE2EDuration="10.229547819s" podCreationTimestamp="2026-01-23 08:40:43 +0000 UTC" firstStartedPulling="2026-01-23 08:40:44.161676913 +0000 UTC m=+1229.734633281" lastFinishedPulling="2026-01-23 08:40:51.986074975 +0000 UTC m=+1237.559031343" observedRunningTime="2026-01-23 08:40:53.186217129 +0000 UTC m=+1238.759173497" watchObservedRunningTime="2026-01-23 08:40:53.229547819 +0000 UTC m=+1238.802504197"
Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 08:40:53.513160 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-hfb95"
Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 08:40:53.555981 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-combined-ca-bundle\") pod \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") "
Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 08:40:53.556113 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-credential-keys\") pod \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") "
Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 08:40:53.556147 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4scpj\" (UniqueName: \"kubernetes.io/projected/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-kube-api-access-4scpj\") pod \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") "
Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 08:40:53.556178 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-config-data\") pod \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") "
Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 08:40:53.556223 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-fernet-keys\") pod \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") "
Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 08:40:53.556245 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-scripts\") pod \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\" (UID: \"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf\") "
Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 08:40:53.561940 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf" (UID: "dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 08:40:53.562271 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-kube-api-access-4scpj" (OuterVolumeSpecName: "kube-api-access-4scpj") pod "dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf" (UID: "dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf"). InnerVolumeSpecName "kube-api-access-4scpj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 08:40:53.562593 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-scripts" (OuterVolumeSpecName: "scripts") pod "dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf" (UID: "dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 08:40:53.562837 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf" (UID: "dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 08:40:53.583411 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-config-data" (OuterVolumeSpecName: "config-data") pod "dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf" (UID: "dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 08:40:53.586840 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf" (UID: "dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 08:40:53.658158 4711 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-credential-keys\") on node \"crc\" DevicePath \"\""
Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 08:40:53.658192 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4scpj\" (UniqueName: \"kubernetes.io/projected/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-kube-api-access-4scpj\") on node \"crc\" DevicePath \"\""
Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 08:40:53.658207 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-config-data\") on node \"crc\" DevicePath \"\""
Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 08:40:53.658219 4711 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-fernet-keys\") on node \"crc\" DevicePath \"\""
Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 08:40:53.658231 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-scripts\") on node \"crc\" DevicePath \"\""
Jan 23 08:40:53 crc kubenswrapper[4711]: I0123 08:40:53.658241 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.182876 4711 generic.go:334] "Generic (PLEG): container finished" podID="17c1d7d2-2f13-4a96-8b57-ef301be89606" containerID="2857200d70b376c98dcc2c645688d4a72ff3a2fb1fb904b1ed49f2fed40b4b89" exitCode=0
Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.182923 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-sync-qtrk2" event={"ID":"17c1d7d2-2f13-4a96-8b57-ef301be89606","Type":"ContainerDied","Data":"2857200d70b376c98dcc2c645688d4a72ff3a2fb1fb904b1ed49f2fed40b4b89"}
Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.185293 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-hfb95" event={"ID":"dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf","Type":"ContainerDied","Data":"2bfe3a417ecc781589ec83c5e690e03b87582893546ad2c70c99fcbdd8a2f0a4"}
Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.185321 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2bfe3a417ecc781589ec83c5e690e03b87582893546ad2c70c99fcbdd8a2f0a4"
Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.185365 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-hfb95" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.352038 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-hfb95"] Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.359639 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-hfb95"] Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.450537 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/keystone-bootstrap-l6szl"] Jan 23 08:40:54 crc kubenswrapper[4711]: E0123 08:40:54.450896 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf" containerName="keystone-bootstrap" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.450913 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf" containerName="keystone-bootstrap" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.451098 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf" containerName="keystone-bootstrap" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.451715 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.457111 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.457114 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-scripts" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.457234 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"osp-secret" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.457301 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-config-data" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.464247 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-keystone-dockercfg-9rrpp" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.468547 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-l6szl"] Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.574183 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-combined-ca-bundle\") pod \"keystone-bootstrap-l6szl\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.574270 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-fernet-keys\") pod \"keystone-bootstrap-l6szl\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.574366 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twsbs\" (UniqueName: \"kubernetes.io/projected/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-kube-api-access-twsbs\") pod \"keystone-bootstrap-l6szl\" (UID: 
\"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.574416 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-scripts\") pod \"keystone-bootstrap-l6szl\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.574464 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-credential-keys\") pod \"keystone-bootstrap-l6szl\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.574482 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-config-data\") pod \"keystone-bootstrap-l6szl\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.675977 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-fernet-keys\") pod \"keystone-bootstrap-l6szl\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.676740 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twsbs\" (UniqueName: \"kubernetes.io/projected/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-kube-api-access-twsbs\") pod \"keystone-bootstrap-l6szl\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.676872 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-scripts\") pod \"keystone-bootstrap-l6szl\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.676909 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-credential-keys\") pod \"keystone-bootstrap-l6szl\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.677261 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-config-data\") pod \"keystone-bootstrap-l6szl\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.677324 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-combined-ca-bundle\") pod \"keystone-bootstrap-l6szl\" (UID: 
\"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.680875 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-scripts\") pod \"keystone-bootstrap-l6szl\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.681369 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-fernet-keys\") pod \"keystone-bootstrap-l6szl\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.681598 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-combined-ca-bundle\") pod \"keystone-bootstrap-l6szl\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.681995 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-credential-keys\") pod \"keystone-bootstrap-l6szl\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.682487 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-config-data\") pod \"keystone-bootstrap-l6szl\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.696009 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twsbs\" (UniqueName: \"kubernetes.io/projected/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-kube-api-access-twsbs\") pod \"keystone-bootstrap-l6szl\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:40:54 crc kubenswrapper[4711]: I0123 08:40:54.769178 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:40:55 crc kubenswrapper[4711]: I0123 08:40:55.226614 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-l6szl"] Jan 23 08:40:55 crc kubenswrapper[4711]: W0123 08:40:55.237947 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff3509f4_218f_4ca4_8f1a_4533c8b4ca49.slice/crio-c1d05f5f4f22321f9f1f804db0814fc34600c32c86c377456c67f68d0554227a WatchSource:0}: Error finding container c1d05f5f4f22321f9f1f804db0814fc34600c32c86c377456c67f68d0554227a: Status 404 returned error can't find the container with id c1d05f5f4f22321f9f1f804db0814fc34600c32c86c377456c67f68d0554227a Jan 23 08:40:55 crc kubenswrapper[4711]: I0123 08:40:55.484332 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf" path="/var/lib/kubelet/pods/dafa2cfd-5e5c-46f7-aea4-10f6f4bbbbbf/volumes" Jan 23 08:40:55 crc kubenswrapper[4711]: I0123 08:40:55.533730 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-db-sync-qtrk2" Jan 23 08:40:55 crc kubenswrapper[4711]: I0123 08:40:55.710157 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17c1d7d2-2f13-4a96-8b57-ef301be89606-combined-ca-bundle\") pod \"17c1d7d2-2f13-4a96-8b57-ef301be89606\" (UID: \"17c1d7d2-2f13-4a96-8b57-ef301be89606\") " Jan 23 08:40:55 crc kubenswrapper[4711]: I0123 08:40:55.710256 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17c1d7d2-2f13-4a96-8b57-ef301be89606-scripts\") pod \"17c1d7d2-2f13-4a96-8b57-ef301be89606\" (UID: \"17c1d7d2-2f13-4a96-8b57-ef301be89606\") " Jan 23 08:40:55 crc kubenswrapper[4711]: I0123 08:40:55.710313 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17c1d7d2-2f13-4a96-8b57-ef301be89606-config-data\") pod \"17c1d7d2-2f13-4a96-8b57-ef301be89606\" (UID: \"17c1d7d2-2f13-4a96-8b57-ef301be89606\") " Jan 23 08:40:55 crc kubenswrapper[4711]: I0123 08:40:55.710351 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17c1d7d2-2f13-4a96-8b57-ef301be89606-logs\") pod \"17c1d7d2-2f13-4a96-8b57-ef301be89606\" (UID: \"17c1d7d2-2f13-4a96-8b57-ef301be89606\") " Jan 23 08:40:55 crc kubenswrapper[4711]: I0123 08:40:55.710423 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mgvd\" (UniqueName: \"kubernetes.io/projected/17c1d7d2-2f13-4a96-8b57-ef301be89606-kube-api-access-9mgvd\") pod \"17c1d7d2-2f13-4a96-8b57-ef301be89606\" (UID: \"17c1d7d2-2f13-4a96-8b57-ef301be89606\") " Jan 23 08:40:55 crc kubenswrapper[4711]: I0123 08:40:55.710943 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17c1d7d2-2f13-4a96-8b57-ef301be89606-logs" (OuterVolumeSpecName: "logs") pod "17c1d7d2-2f13-4a96-8b57-ef301be89606" (UID: "17c1d7d2-2f13-4a96-8b57-ef301be89606"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:40:55 crc kubenswrapper[4711]: I0123 08:40:55.714145 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17c1d7d2-2f13-4a96-8b57-ef301be89606-scripts" (OuterVolumeSpecName: "scripts") pod "17c1d7d2-2f13-4a96-8b57-ef301be89606" (UID: "17c1d7d2-2f13-4a96-8b57-ef301be89606"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:40:55 crc kubenswrapper[4711]: I0123 08:40:55.714781 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17c1d7d2-2f13-4a96-8b57-ef301be89606-kube-api-access-9mgvd" (OuterVolumeSpecName: "kube-api-access-9mgvd") pod "17c1d7d2-2f13-4a96-8b57-ef301be89606" (UID: "17c1d7d2-2f13-4a96-8b57-ef301be89606"). InnerVolumeSpecName "kube-api-access-9mgvd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:40:55 crc kubenswrapper[4711]: I0123 08:40:55.732460 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17c1d7d2-2f13-4a96-8b57-ef301be89606-config-data" (OuterVolumeSpecName: "config-data") pod "17c1d7d2-2f13-4a96-8b57-ef301be89606" (UID: "17c1d7d2-2f13-4a96-8b57-ef301be89606"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:40:55 crc kubenswrapper[4711]: I0123 08:40:55.732999 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17c1d7d2-2f13-4a96-8b57-ef301be89606-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "17c1d7d2-2f13-4a96-8b57-ef301be89606" (UID: "17c1d7d2-2f13-4a96-8b57-ef301be89606"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:40:55 crc kubenswrapper[4711]: I0123 08:40:55.812130 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mgvd\" (UniqueName: \"kubernetes.io/projected/17c1d7d2-2f13-4a96-8b57-ef301be89606-kube-api-access-9mgvd\") on node \"crc\" DevicePath \"\"" Jan 23 08:40:55 crc kubenswrapper[4711]: I0123 08:40:55.812177 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17c1d7d2-2f13-4a96-8b57-ef301be89606-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 23 08:40:55 crc kubenswrapper[4711]: I0123 08:40:55.812189 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17c1d7d2-2f13-4a96-8b57-ef301be89606-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:40:55 crc kubenswrapper[4711]: I0123 08:40:55.812200 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17c1d7d2-2f13-4a96-8b57-ef301be89606-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:40:55 crc kubenswrapper[4711]: I0123 08:40:55.812211 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17c1d7d2-2f13-4a96-8b57-ef301be89606-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:40:55 crc kubenswrapper[4711]: I0123 08:40:55.994043 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:40:55 crc kubenswrapper[4711]: I0123 08:40:55.995391 4711 
prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.202925 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-l6szl" event={"ID":"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49","Type":"ContainerStarted","Data":"29498e8723bec53c07ceb121f585404a7d28138b00bc83f2c525b61a8a00336d"} Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.202966 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-l6szl" event={"ID":"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49","Type":"ContainerStarted","Data":"c1d05f5f4f22321f9f1f804db0814fc34600c32c86c377456c67f68d0554227a"} Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.205773 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-sync-qtrk2" event={"ID":"17c1d7d2-2f13-4a96-8b57-ef301be89606","Type":"ContainerDied","Data":"0905f7bdf53976395a11db6285072d45765cf8f9af97160ca546a48176094943"} Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.206015 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0905f7bdf53976395a11db6285072d45765cf8f9af97160ca546a48176094943" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.206019 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-db-sync-qtrk2" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.229411 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/keystone-bootstrap-l6szl" podStartSLOduration=2.2293915970000002 podStartE2EDuration="2.229391597s" podCreationTimestamp="2026-01-23 08:40:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:40:56.223708436 +0000 UTC m=+1241.796673245" watchObservedRunningTime="2026-01-23 08:40:56.229391597 +0000 UTC m=+1241.802347965" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.400240 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/placement-559c845968-gb6qv"] Jan 23 08:40:56 crc kubenswrapper[4711]: E0123 08:40:56.400686 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17c1d7d2-2f13-4a96-8b57-ef301be89606" containerName="placement-db-sync" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.400707 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="17c1d7d2-2f13-4a96-8b57-ef301be89606" containerName="placement-db-sync" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.400865 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="17c1d7d2-2f13-4a96-8b57-ef301be89606" containerName="placement-db-sync" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.401822 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/placement-559c845968-gb6qv" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.405758 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-config-data" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.405866 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-placement-dockercfg-549tr" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.406062 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-scripts" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.414026 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-559c845968-gb6qv"] Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.452110 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zj2qv\" (UniqueName: \"kubernetes.io/projected/eb857478-48a3-4ed9-8a19-47386937c4d7-kube-api-access-zj2qv\") pod \"placement-559c845968-gb6qv\" (UID: \"eb857478-48a3-4ed9-8a19-47386937c4d7\") " pod="nova-kuttl-default/placement-559c845968-gb6qv" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.452195 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb857478-48a3-4ed9-8a19-47386937c4d7-config-data\") pod \"placement-559c845968-gb6qv\" (UID: \"eb857478-48a3-4ed9-8a19-47386937c4d7\") " pod="nova-kuttl-default/placement-559c845968-gb6qv" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.452293 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb857478-48a3-4ed9-8a19-47386937c4d7-combined-ca-bundle\") pod \"placement-559c845968-gb6qv\" (UID: \"eb857478-48a3-4ed9-8a19-47386937c4d7\") " pod="nova-kuttl-default/placement-559c845968-gb6qv" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.452725 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb857478-48a3-4ed9-8a19-47386937c4d7-logs\") pod \"placement-559c845968-gb6qv\" (UID: \"eb857478-48a3-4ed9-8a19-47386937c4d7\") " pod="nova-kuttl-default/placement-559c845968-gb6qv" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.452781 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb857478-48a3-4ed9-8a19-47386937c4d7-scripts\") pod \"placement-559c845968-gb6qv\" (UID: \"eb857478-48a3-4ed9-8a19-47386937c4d7\") " pod="nova-kuttl-default/placement-559c845968-gb6qv" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.554482 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb857478-48a3-4ed9-8a19-47386937c4d7-logs\") pod \"placement-559c845968-gb6qv\" (UID: \"eb857478-48a3-4ed9-8a19-47386937c4d7\") " pod="nova-kuttl-default/placement-559c845968-gb6qv" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.554539 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb857478-48a3-4ed9-8a19-47386937c4d7-scripts\") pod \"placement-559c845968-gb6qv\" (UID: \"eb857478-48a3-4ed9-8a19-47386937c4d7\") " 
pod="nova-kuttl-default/placement-559c845968-gb6qv" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.554589 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zj2qv\" (UniqueName: \"kubernetes.io/projected/eb857478-48a3-4ed9-8a19-47386937c4d7-kube-api-access-zj2qv\") pod \"placement-559c845968-gb6qv\" (UID: \"eb857478-48a3-4ed9-8a19-47386937c4d7\") " pod="nova-kuttl-default/placement-559c845968-gb6qv" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.554632 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb857478-48a3-4ed9-8a19-47386937c4d7-config-data\") pod \"placement-559c845968-gb6qv\" (UID: \"eb857478-48a3-4ed9-8a19-47386937c4d7\") " pod="nova-kuttl-default/placement-559c845968-gb6qv" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.554651 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb857478-48a3-4ed9-8a19-47386937c4d7-combined-ca-bundle\") pod \"placement-559c845968-gb6qv\" (UID: \"eb857478-48a3-4ed9-8a19-47386937c4d7\") " pod="nova-kuttl-default/placement-559c845968-gb6qv" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.555055 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb857478-48a3-4ed9-8a19-47386937c4d7-logs\") pod \"placement-559c845968-gb6qv\" (UID: \"eb857478-48a3-4ed9-8a19-47386937c4d7\") " pod="nova-kuttl-default/placement-559c845968-gb6qv" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.559204 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb857478-48a3-4ed9-8a19-47386937c4d7-scripts\") pod \"placement-559c845968-gb6qv\" (UID: \"eb857478-48a3-4ed9-8a19-47386937c4d7\") " pod="nova-kuttl-default/placement-559c845968-gb6qv" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.559601 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb857478-48a3-4ed9-8a19-47386937c4d7-combined-ca-bundle\") pod \"placement-559c845968-gb6qv\" (UID: \"eb857478-48a3-4ed9-8a19-47386937c4d7\") " pod="nova-kuttl-default/placement-559c845968-gb6qv" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.561266 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb857478-48a3-4ed9-8a19-47386937c4d7-config-data\") pod \"placement-559c845968-gb6qv\" (UID: \"eb857478-48a3-4ed9-8a19-47386937c4d7\") " pod="nova-kuttl-default/placement-559c845968-gb6qv" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.589180 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zj2qv\" (UniqueName: \"kubernetes.io/projected/eb857478-48a3-4ed9-8a19-47386937c4d7-kube-api-access-zj2qv\") pod \"placement-559c845968-gb6qv\" (UID: \"eb857478-48a3-4ed9-8a19-47386937c4d7\") " pod="nova-kuttl-default/placement-559c845968-gb6qv" Jan 23 08:40:56 crc kubenswrapper[4711]: I0123 08:40:56.726358 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/placement-559c845968-gb6qv" Jan 23 08:40:57 crc kubenswrapper[4711]: I0123 08:40:57.586772 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-559c845968-gb6qv"] Jan 23 08:40:58 crc kubenswrapper[4711]: I0123 08:40:58.310135 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-559c845968-gb6qv" event={"ID":"eb857478-48a3-4ed9-8a19-47386937c4d7","Type":"ContainerStarted","Data":"7cb525666e005011d93c6eac897c343cef582f0c24fd476f52573a800d1c26d2"} Jan 23 08:40:58 crc kubenswrapper[4711]: I0123 08:40:58.310756 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-559c845968-gb6qv" event={"ID":"eb857478-48a3-4ed9-8a19-47386937c4d7","Type":"ContainerStarted","Data":"aed6ee5dc52ee580e00cda954eb37cf7d810243bb7694e04834a8a5cade26b2a"} Jan 23 08:40:58 crc kubenswrapper[4711]: I0123 08:40:58.310772 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/placement-559c845968-gb6qv" Jan 23 08:40:58 crc kubenswrapper[4711]: I0123 08:40:58.310781 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-559c845968-gb6qv" event={"ID":"eb857478-48a3-4ed9-8a19-47386937c4d7","Type":"ContainerStarted","Data":"98debfc471bfac73279442aefc54be58b345b0a50ad2ae42a52f12e2ef9b049a"} Jan 23 08:40:58 crc kubenswrapper[4711]: I0123 08:40:58.328213 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/placement-559c845968-gb6qv" podStartSLOduration=2.328196478 podStartE2EDuration="2.328196478s" podCreationTimestamp="2026-01-23 08:40:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:40:58.325695326 +0000 UTC m=+1243.898651694" watchObservedRunningTime="2026-01-23 08:40:58.328196478 +0000 UTC m=+1243.901152846" Jan 23 08:40:59 crc kubenswrapper[4711]: I0123 08:40:59.320981 4711 generic.go:334] "Generic (PLEG): container finished" podID="ff3509f4-218f-4ca4-8f1a-4533c8b4ca49" containerID="29498e8723bec53c07ceb121f585404a7d28138b00bc83f2c525b61a8a00336d" exitCode=0 Jan 23 08:40:59 crc kubenswrapper[4711]: I0123 08:40:59.321076 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-l6szl" event={"ID":"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49","Type":"ContainerDied","Data":"29498e8723bec53c07ceb121f585404a7d28138b00bc83f2c525b61a8a00336d"} Jan 23 08:40:59 crc kubenswrapper[4711]: I0123 08:40:59.321431 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/placement-559c845968-gb6qv" Jan 23 08:41:00 crc kubenswrapper[4711]: I0123 08:41:00.620937 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:41:00 crc kubenswrapper[4711]: I0123 08:41:00.738108 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twsbs\" (UniqueName: \"kubernetes.io/projected/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-kube-api-access-twsbs\") pod \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " Jan 23 08:41:00 crc kubenswrapper[4711]: I0123 08:41:00.738150 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-fernet-keys\") pod \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " Jan 23 08:41:00 crc kubenswrapper[4711]: I0123 08:41:00.738203 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-credential-keys\") pod \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " Jan 23 08:41:00 crc kubenswrapper[4711]: I0123 08:41:00.738220 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-config-data\") pod \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " Jan 23 08:41:00 crc kubenswrapper[4711]: I0123 08:41:00.738245 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-combined-ca-bundle\") pod \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " Jan 23 08:41:00 crc kubenswrapper[4711]: I0123 08:41:00.738311 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-scripts\") pod \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\" (UID: \"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49\") " Jan 23 08:41:00 crc kubenswrapper[4711]: I0123 08:41:00.743692 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-scripts" (OuterVolumeSpecName: "scripts") pod "ff3509f4-218f-4ca4-8f1a-4533c8b4ca49" (UID: "ff3509f4-218f-4ca4-8f1a-4533c8b4ca49"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:41:00 crc kubenswrapper[4711]: I0123 08:41:00.744190 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-kube-api-access-twsbs" (OuterVolumeSpecName: "kube-api-access-twsbs") pod "ff3509f4-218f-4ca4-8f1a-4533c8b4ca49" (UID: "ff3509f4-218f-4ca4-8f1a-4533c8b4ca49"). InnerVolumeSpecName "kube-api-access-twsbs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:41:00 crc kubenswrapper[4711]: I0123 08:41:00.744637 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "ff3509f4-218f-4ca4-8f1a-4533c8b4ca49" (UID: "ff3509f4-218f-4ca4-8f1a-4533c8b4ca49"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:41:00 crc kubenswrapper[4711]: I0123 08:41:00.747656 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "ff3509f4-218f-4ca4-8f1a-4533c8b4ca49" (UID: "ff3509f4-218f-4ca4-8f1a-4533c8b4ca49"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:41:00 crc kubenswrapper[4711]: I0123 08:41:00.767517 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ff3509f4-218f-4ca4-8f1a-4533c8b4ca49" (UID: "ff3509f4-218f-4ca4-8f1a-4533c8b4ca49"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:41:00 crc kubenswrapper[4711]: I0123 08:41:00.768265 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-config-data" (OuterVolumeSpecName: "config-data") pod "ff3509f4-218f-4ca4-8f1a-4533c8b4ca49" (UID: "ff3509f4-218f-4ca4-8f1a-4533c8b4ca49"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:41:00 crc kubenswrapper[4711]: I0123 08:41:00.839683 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:41:00 crc kubenswrapper[4711]: I0123 08:41:00.839913 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twsbs\" (UniqueName: \"kubernetes.io/projected/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-kube-api-access-twsbs\") on node \"crc\" DevicePath \"\"" Jan 23 08:41:00 crc kubenswrapper[4711]: I0123 08:41:00.840036 4711 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 23 08:41:00 crc kubenswrapper[4711]: I0123 08:41:00.840118 4711 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 23 08:41:00 crc kubenswrapper[4711]: I0123 08:41:00.840189 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:41:00 crc kubenswrapper[4711]: I0123 08:41:00.840250 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.339877 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-l6szl" event={"ID":"ff3509f4-218f-4ca4-8f1a-4533c8b4ca49","Type":"ContainerDied","Data":"c1d05f5f4f22321f9f1f804db0814fc34600c32c86c377456c67f68d0554227a"} Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.340194 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1d05f5f4f22321f9f1f804db0814fc34600c32c86c377456c67f68d0554227a" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 
08:41:01.339971 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-l6szl" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.417243 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/keystone-784948c4bd-c7q8r"] Jan 23 08:41:01 crc kubenswrapper[4711]: E0123 08:41:01.417769 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff3509f4-218f-4ca4-8f1a-4533c8b4ca49" containerName="keystone-bootstrap" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.417796 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff3509f4-218f-4ca4-8f1a-4533c8b4ca49" containerName="keystone-bootstrap" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.418016 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff3509f4-218f-4ca4-8f1a-4533c8b4ca49" containerName="keystone-bootstrap" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.418718 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.421357 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.421443 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-scripts" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.427691 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-784948c4bd-c7q8r"] Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.437271 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-keystone-dockercfg-9rrpp" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.437622 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-config-data" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.551845 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rjhv\" (UniqueName: \"kubernetes.io/projected/b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7-kube-api-access-8rjhv\") pod \"keystone-784948c4bd-c7q8r\" (UID: \"b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7\") " pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.551938 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7-credential-keys\") pod \"keystone-784948c4bd-c7q8r\" (UID: \"b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7\") " pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.551993 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7-scripts\") pod \"keystone-784948c4bd-c7q8r\" (UID: \"b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7\") " pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.552036 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7-combined-ca-bundle\") pod \"keystone-784948c4bd-c7q8r\" (UID: 
\"b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7\") " pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.552055 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7-config-data\") pod \"keystone-784948c4bd-c7q8r\" (UID: \"b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7\") " pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.552076 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7-fernet-keys\") pod \"keystone-784948c4bd-c7q8r\" (UID: \"b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7\") " pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.653802 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7-credential-keys\") pod \"keystone-784948c4bd-c7q8r\" (UID: \"b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7\") " pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.653875 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7-scripts\") pod \"keystone-784948c4bd-c7q8r\" (UID: \"b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7\") " pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.653913 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7-combined-ca-bundle\") pod \"keystone-784948c4bd-c7q8r\" (UID: \"b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7\") " pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.653935 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7-config-data\") pod \"keystone-784948c4bd-c7q8r\" (UID: \"b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7\") " pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.653955 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7-fernet-keys\") pod \"keystone-784948c4bd-c7q8r\" (UID: \"b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7\") " pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.654027 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rjhv\" (UniqueName: \"kubernetes.io/projected/b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7-kube-api-access-8rjhv\") pod \"keystone-784948c4bd-c7q8r\" (UID: \"b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7\") " pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.657311 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7-fernet-keys\") pod \"keystone-784948c4bd-c7q8r\" (UID: 
\"b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7\") " pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.657652 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7-credential-keys\") pod \"keystone-784948c4bd-c7q8r\" (UID: \"b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7\") " pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.658769 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7-scripts\") pod \"keystone-784948c4bd-c7q8r\" (UID: \"b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7\") " pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.659593 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7-combined-ca-bundle\") pod \"keystone-784948c4bd-c7q8r\" (UID: \"b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7\") " pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.663147 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7-config-data\") pod \"keystone-784948c4bd-c7q8r\" (UID: \"b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7\") " pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.672525 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rjhv\" (UniqueName: \"kubernetes.io/projected/b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7-kube-api-access-8rjhv\") pod \"keystone-784948c4bd-c7q8r\" (UID: \"b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7\") " pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" Jan 23 08:41:01 crc kubenswrapper[4711]: I0123 08:41:01.743311 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" Jan 23 08:41:02 crc kubenswrapper[4711]: I0123 08:41:02.205072 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-784948c4bd-c7q8r"] Jan 23 08:41:02 crc kubenswrapper[4711]: W0123 08:41:02.351702 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8cff513_6ae8_4f64_b9e5_7dfa6cd9f6d7.slice/crio-a52030fd7a27182343167e77121cb8c9ddbc7bca54e53e71194eedd6a4086e02 WatchSource:0}: Error finding container a52030fd7a27182343167e77121cb8c9ddbc7bca54e53e71194eedd6a4086e02: Status 404 returned error can't find the container with id a52030fd7a27182343167e77121cb8c9ddbc7bca54e53e71194eedd6a4086e02 Jan 23 08:41:03 crc kubenswrapper[4711]: I0123 08:41:03.354588 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" event={"ID":"b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7","Type":"ContainerStarted","Data":"c7faf6a92fbe3b1d362b2ab6dc85a64648d47d143ca2abacd917f995db9fc19c"} Jan 23 08:41:03 crc kubenswrapper[4711]: I0123 08:41:03.354635 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" event={"ID":"b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7","Type":"ContainerStarted","Data":"a52030fd7a27182343167e77121cb8c9ddbc7bca54e53e71194eedd6a4086e02"} Jan 23 08:41:03 crc kubenswrapper[4711]: I0123 08:41:03.354742 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" Jan 23 08:41:03 crc kubenswrapper[4711]: I0123 08:41:03.372185 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/keystone-784948c4bd-c7q8r" podStartSLOduration=2.372166775 podStartE2EDuration="2.372166775s" podCreationTimestamp="2026-01-23 08:41:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:41:03.370837532 +0000 UTC m=+1248.943793910" watchObservedRunningTime="2026-01-23 08:41:03.372166775 +0000 UTC m=+1248.945123153" Jan 23 08:41:25 crc kubenswrapper[4711]: I0123 08:41:25.993067 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:41:25 crc kubenswrapper[4711]: I0123 08:41:25.993736 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:41:25 crc kubenswrapper[4711]: I0123 08:41:25.993782 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:41:25 crc kubenswrapper[4711]: I0123 08:41:25.996726 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9ef6df8407452842a81bff9ff371dec1b0be0a97894fe9cf1da32e295f2f3558"} pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" containerMessage="Container machine-config-daemon failed liveness 
probe, will be restarted"
Jan 23 08:41:25 crc kubenswrapper[4711]: I0123 08:41:25.996810 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" containerID="cri-o://9ef6df8407452842a81bff9ff371dec1b0be0a97894fe9cf1da32e295f2f3558" gracePeriod=600
Jan 23 08:41:31 crc kubenswrapper[4711]: I0123 08:41:31.520160 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/placement-559c845968-gb6qv"
Jan 23 08:41:31 crc kubenswrapper[4711]: I0123 08:41:31.523332 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/placement-559c845968-gb6qv"
Jan 23 08:41:31 crc kubenswrapper[4711]: I0123 08:41:31.716778 4711 generic.go:334] "Generic (PLEG): container finished" podID="3846d4e0-cfda-4e0b-8747-85267de12736" containerID="9ef6df8407452842a81bff9ff371dec1b0be0a97894fe9cf1da32e295f2f3558" exitCode=0
Jan 23 08:41:31 crc kubenswrapper[4711]: I0123 08:41:31.716825 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerDied","Data":"9ef6df8407452842a81bff9ff371dec1b0be0a97894fe9cf1da32e295f2f3558"}
Jan 23 08:41:31 crc kubenswrapper[4711]: I0123 08:41:31.716883 4711 scope.go:117] "RemoveContainer" containerID="ba3caaa2b687a97a0322bba7ecb4eece08eed4af73c49fa085ce275a1fc9329c"
Jan 23 08:41:34 crc kubenswrapper[4711]: I0123 08:41:34.632258 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/keystone-784948c4bd-c7q8r"
Jan 23 08:41:34 crc kubenswrapper[4711]: I0123 08:41:34.739974 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerStarted","Data":"509b113ae3fd960091847020bbc2a0f41a3fb8b6e06cdd9e7afa31b3382efa17"}
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.076558 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/openstackclient"]
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.078284 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.088449 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"openstack-config"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.088790 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"openstackclient-openstackclient-dockercfg-xqhx6"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.088781 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/openstackclient"]
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.089132 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"openstack-config-secret"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.192430 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-openstack-config-secret\") pod \"openstackclient\" (UID: \"2d5d6daa-e32f-4696-bfd4-6c8c382f6915\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.193050 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wvvd\" (UniqueName: \"kubernetes.io/projected/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-kube-api-access-5wvvd\") pod \"openstackclient\" (UID: \"2d5d6daa-e32f-4696-bfd4-6c8c382f6915\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.193194 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-combined-ca-bundle\") pod \"openstackclient\" (UID: \"2d5d6daa-e32f-4696-bfd4-6c8c382f6915\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.193278 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-openstack-config\") pod \"openstackclient\" (UID: \"2d5d6daa-e32f-4696-bfd4-6c8c382f6915\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.294334 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-openstack-config-secret\") pod \"openstackclient\" (UID: \"2d5d6daa-e32f-4696-bfd4-6c8c382f6915\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.294399 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wvvd\" (UniqueName: \"kubernetes.io/projected/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-kube-api-access-5wvvd\") pod \"openstackclient\" (UID: \"2d5d6daa-e32f-4696-bfd4-6c8c382f6915\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.294424 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-combined-ca-bundle\") pod \"openstackclient\" (UID: \"2d5d6daa-e32f-4696-bfd4-6c8c382f6915\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.294449 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-openstack-config\") pod \"openstackclient\" (UID: \"2d5d6daa-e32f-4696-bfd4-6c8c382f6915\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.295633 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-openstack-config\") pod \"openstackclient\" (UID: \"2d5d6daa-e32f-4696-bfd4-6c8c382f6915\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.308239 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-openstack-config-secret\") pod \"openstackclient\" (UID: \"2d5d6daa-e32f-4696-bfd4-6c8c382f6915\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.309591 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-combined-ca-bundle\") pod \"openstackclient\" (UID: \"2d5d6daa-e32f-4696-bfd4-6c8c382f6915\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.312621 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wvvd\" (UniqueName: \"kubernetes.io/projected/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-kube-api-access-5wvvd\") pod \"openstackclient\" (UID: \"2d5d6daa-e32f-4696-bfd4-6c8c382f6915\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.376861 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/openstackclient"]
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.377651 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.382542 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/openstackclient"]
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.411116 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/openstackclient"]
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.412163 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.429470 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/openstackclient"]
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.497105 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3ac7d3e6-9992-4a83-bbff-8c99ef784b20-openstack-config-secret\") pod \"openstackclient\" (UID: \"3ac7d3e6-9992-4a83-bbff-8c99ef784b20\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.497158 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ac7d3e6-9992-4a83-bbff-8c99ef784b20-combined-ca-bundle\") pod \"openstackclient\" (UID: \"3ac7d3e6-9992-4a83-bbff-8c99ef784b20\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.497248 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8qcj\" (UniqueName: \"kubernetes.io/projected/3ac7d3e6-9992-4a83-bbff-8c99ef784b20-kube-api-access-k8qcj\") pod \"openstackclient\" (UID: \"3ac7d3e6-9992-4a83-bbff-8c99ef784b20\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.497295 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3ac7d3e6-9992-4a83-bbff-8c99ef784b20-openstack-config\") pod \"openstackclient\" (UID: \"3ac7d3e6-9992-4a83-bbff-8c99ef784b20\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.598278 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3ac7d3e6-9992-4a83-bbff-8c99ef784b20-openstack-config-secret\") pod \"openstackclient\" (UID: \"3ac7d3e6-9992-4a83-bbff-8c99ef784b20\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.598333 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ac7d3e6-9992-4a83-bbff-8c99ef784b20-combined-ca-bundle\") pod \"openstackclient\" (UID: \"3ac7d3e6-9992-4a83-bbff-8c99ef784b20\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.598391 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8qcj\" (UniqueName: \"kubernetes.io/projected/3ac7d3e6-9992-4a83-bbff-8c99ef784b20-kube-api-access-k8qcj\") pod \"openstackclient\" (UID: \"3ac7d3e6-9992-4a83-bbff-8c99ef784b20\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.598419 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3ac7d3e6-9992-4a83-bbff-8c99ef784b20-openstack-config\") pod \"openstackclient\" (UID: \"3ac7d3e6-9992-4a83-bbff-8c99ef784b20\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.599263 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3ac7d3e6-9992-4a83-bbff-8c99ef784b20-openstack-config\") pod \"openstackclient\" (UID: \"3ac7d3e6-9992-4a83-bbff-8c99ef784b20\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.642759 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ac7d3e6-9992-4a83-bbff-8c99ef784b20-combined-ca-bundle\") pod \"openstackclient\" (UID: \"3ac7d3e6-9992-4a83-bbff-8c99ef784b20\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.642995 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3ac7d3e6-9992-4a83-bbff-8c99ef784b20-openstack-config-secret\") pod \"openstackclient\" (UID: \"3ac7d3e6-9992-4a83-bbff-8c99ef784b20\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.644425 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8qcj\" (UniqueName: \"kubernetes.io/projected/3ac7d3e6-9992-4a83-bbff-8c99ef784b20-kube-api-access-k8qcj\") pod \"openstackclient\" (UID: \"3ac7d3e6-9992-4a83-bbff-8c99ef784b20\") " pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: E0123 08:41:38.728269 4711 log.go:32] "RunPodSandbox from runtime service failed" err=<
Jan 23 08:41:38 crc kubenswrapper[4711]: 	rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_nova-kuttl-default_2d5d6daa-e32f-4696-bfd4-6c8c382f6915_0(ee87b8ae4ee0785d4f424e47fcfd37ecb9c1eb807c0955e13c8e919684ce9718): error adding pod nova-kuttl-default_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"ee87b8ae4ee0785d4f424e47fcfd37ecb9c1eb807c0955e13c8e919684ce9718" Netns:"/var/run/netns/e9c4e4ad-72d7-49d9-b1be-24a8f337efef" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=nova-kuttl-default;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=ee87b8ae4ee0785d4f424e47fcfd37ecb9c1eb807c0955e13c8e919684ce9718;K8S_POD_UID=2d5d6daa-e32f-4696-bfd4-6c8c382f6915" Path:"" ERRORED: error configuring pod [nova-kuttl-default/openstackclient] networking: Multus: [nova-kuttl-default/openstackclient/2d5d6daa-e32f-4696-bfd4-6c8c382f6915]: expected pod UID "2d5d6daa-e32f-4696-bfd4-6c8c382f6915" but got "3ac7d3e6-9992-4a83-bbff-8c99ef784b20" from Kube API
Jan 23 08:41:38 crc kubenswrapper[4711]: 	': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"}
Jan 23 08:41:38 crc kubenswrapper[4711]: 	>
Jan 23 08:41:38 crc kubenswrapper[4711]: E0123 08:41:38.728353 4711 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=<
Jan 23 08:41:38 crc kubenswrapper[4711]: 	rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_nova-kuttl-default_2d5d6daa-e32f-4696-bfd4-6c8c382f6915_0(ee87b8ae4ee0785d4f424e47fcfd37ecb9c1eb807c0955e13c8e919684ce9718): error adding pod nova-kuttl-default_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"ee87b8ae4ee0785d4f424e47fcfd37ecb9c1eb807c0955e13c8e919684ce9718" Netns:"/var/run/netns/e9c4e4ad-72d7-49d9-b1be-24a8f337efef" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=nova-kuttl-default;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=ee87b8ae4ee0785d4f424e47fcfd37ecb9c1eb807c0955e13c8e919684ce9718;K8S_POD_UID=2d5d6daa-e32f-4696-bfd4-6c8c382f6915" Path:"" ERRORED: error configuring pod [nova-kuttl-default/openstackclient] networking: Multus: [nova-kuttl-default/openstackclient/2d5d6daa-e32f-4696-bfd4-6c8c382f6915]: expected pod UID "2d5d6daa-e32f-4696-bfd4-6c8c382f6915" but got "3ac7d3e6-9992-4a83-bbff-8c99ef784b20" from Kube API
Jan 23 08:41:38 crc kubenswrapper[4711]: 	': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"}
Jan 23 08:41:38 crc kubenswrapper[4711]: 	> pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.732012 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.785002 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.788988 4711 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="nova-kuttl-default/openstackclient" oldPodUID="2d5d6daa-e32f-4696-bfd4-6c8c382f6915" podUID="3ac7d3e6-9992-4a83-bbff-8c99ef784b20"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.799479 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.923179 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-openstack-config-secret\") pod \"2d5d6daa-e32f-4696-bfd4-6c8c382f6915\" (UID: \"2d5d6daa-e32f-4696-bfd4-6c8c382f6915\") "
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.923628 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-combined-ca-bundle\") pod \"2d5d6daa-e32f-4696-bfd4-6c8c382f6915\" (UID: \"2d5d6daa-e32f-4696-bfd4-6c8c382f6915\") "
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.923781 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5wvvd\" (UniqueName: \"kubernetes.io/projected/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-kube-api-access-5wvvd\") pod \"2d5d6daa-e32f-4696-bfd4-6c8c382f6915\" (UID: \"2d5d6daa-e32f-4696-bfd4-6c8c382f6915\") "
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.923830 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-openstack-config\") pod \"2d5d6daa-e32f-4696-bfd4-6c8c382f6915\" (UID: \"2d5d6daa-e32f-4696-bfd4-6c8c382f6915\") "
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.925007 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "2d5d6daa-e32f-4696-bfd4-6c8c382f6915" (UID: "2d5d6daa-e32f-4696-bfd4-6c8c382f6915"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.958663 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "2d5d6daa-e32f-4696-bfd4-6c8c382f6915" (UID: "2d5d6daa-e32f-4696-bfd4-6c8c382f6915"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.961851 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2d5d6daa-e32f-4696-bfd4-6c8c382f6915" (UID: "2d5d6daa-e32f-4696-bfd4-6c8c382f6915"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:41:38 crc kubenswrapper[4711]: I0123 08:41:38.961876 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-kube-api-access-5wvvd" (OuterVolumeSpecName: "kube-api-access-5wvvd") pod "2d5d6daa-e32f-4696-bfd4-6c8c382f6915" (UID: "2d5d6daa-e32f-4696-bfd4-6c8c382f6915"). InnerVolumeSpecName "kube-api-access-5wvvd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:41:39 crc kubenswrapper[4711]: I0123 08:41:39.026326 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wvvd\" (UniqueName: \"kubernetes.io/projected/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-kube-api-access-5wvvd\") on node \"crc\" DevicePath \"\""
Jan 23 08:41:39 crc kubenswrapper[4711]: I0123 08:41:39.026357 4711 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-openstack-config\") on node \"crc\" DevicePath \"\""
Jan 23 08:41:39 crc kubenswrapper[4711]: I0123 08:41:39.026367 4711 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Jan 23 08:41:39 crc kubenswrapper[4711]: I0123 08:41:39.026375 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d5d6daa-e32f-4696-bfd4-6c8c382f6915-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 23 08:41:39 crc kubenswrapper[4711]: I0123 08:41:39.301847 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/openstackclient"]
Jan 23 08:41:39 crc kubenswrapper[4711]: I0123 08:41:39.482168 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d5d6daa-e32f-4696-bfd4-6c8c382f6915" path="/var/lib/kubelet/pods/2d5d6daa-e32f-4696-bfd4-6c8c382f6915/volumes"
Jan 23 08:41:39 crc kubenswrapper[4711]: I0123 08:41:39.797107 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/openstackclient"
Jan 23 08:41:39 crc kubenswrapper[4711]: I0123 08:41:39.797089 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstackclient" event={"ID":"3ac7d3e6-9992-4a83-bbff-8c99ef784b20","Type":"ContainerStarted","Data":"a66117c94cd245e1139e243dad7ce2bee03c368716eda4ee1981e12e4b31f5e8"}
Jan 23 08:41:39 crc kubenswrapper[4711]: I0123 08:41:39.803591 4711 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="nova-kuttl-default/openstackclient" oldPodUID="2d5d6daa-e32f-4696-bfd4-6c8c382f6915" podUID="3ac7d3e6-9992-4a83-bbff-8c99ef784b20"
Jan 23 08:41:54 crc kubenswrapper[4711]: E0123 08:41:54.506198 4711 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified"
Jan 23 08:41:54 crc kubenswrapper[4711]: E0123 08:41:54.506913 4711 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:openstackclient,Image:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,Command:[/bin/sleep],Args:[infinity],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n645h59h5d5h698h5f4h7h7fh65ch646h68ch99h6dh64fh7h5ffh585h7dh574h5d9h66h5f7h87h54fh68h56fh696h58ch9fh699h68h5dfh5bdq,ValueFrom:nil,},EnvVar{Name:OS_CLOUD,Value:default,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_HOST,Value:metric-storage-prometheus.nova-kuttl-default.svc,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_PORT,Value:9090,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openstack-config,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/cloudrc,SubPath:cloudrc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k8qcj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42401,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:*42401,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstackclient_nova-kuttl-default(3ac7d3e6-9992-4a83-bbff-8c99ef784b20): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 23 08:41:54 crc kubenswrapper[4711]: E0123 08:41:54.508248 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="nova-kuttl-default/openstackclient" podUID="3ac7d3e6-9992-4a83-bbff-8c99ef784b20"
Jan 23 08:41:54 crc kubenswrapper[4711]: E0123 08:41:54.973371 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified\\\"\"" pod="nova-kuttl-default/openstackclient" podUID="3ac7d3e6-9992-4a83-bbff-8c99ef784b20"
Jan 23 08:42:12 crc kubenswrapper[4711]: I0123 08:42:12.102787 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstackclient" event={"ID":"3ac7d3e6-9992-4a83-bbff-8c99ef784b20","Type":"ContainerStarted","Data":"508c5eaa452d65e589389dfd4ee1e6b526610b9ca2876c3fded5fb6d14271f1a"}
Jan 23 08:42:12 crc kubenswrapper[4711]: I0123 08:42:12.123023 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/openstackclient" podStartSLOduration=1.847405884 podStartE2EDuration="34.123002146s" podCreationTimestamp="2026-01-23 08:41:38 +0000 UTC" firstStartedPulling="2026-01-23 08:41:39.31851684 +0000 UTC m=+1284.891473208" lastFinishedPulling="2026-01-23 08:42:11.594113102 +0000 UTC m=+1317.167069470" observedRunningTime="2026-01-23 08:42:12.120449773 +0000 UTC m=+1317.693406141" watchObservedRunningTime="2026-01-23 08:42:12.123002146 +0000 UTC m=+1317.695958534"
Jan 23 08:42:19 crc kubenswrapper[4711]: I0123 08:42:19.214038 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-controller-init-59f56ff984-tx7z9"]
Jan 23 08:42:19 crc kubenswrapper[4711]: I0123 08:42:19.214812 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-controller-init-59f56ff984-tx7z9" podUID="b302d05c-2499-4bf0-a271-29e930bf8c0d" containerName="operator" containerID="cri-o://d2ccbbe104b0fe806d52327b913d12d3b9aa75d81463ae164030c697def9b341" gracePeriod=10
Jan 23 08:42:19 crc kubenswrapper[4711]: I0123 08:42:19.520729 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p"]
Jan 23 08:42:19 crc kubenswrapper[4711]: I0123 08:42:19.521232 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p" podUID="20db23eb-eb12-458a-9c9e-164f8e3bcab7" containerName="manager" containerID="cri-o://6b8be6370443e6621f6fbe1adcf9da2e3f2da110ecfd02ba01c2c7d20bbc75a8" gracePeriod=10
Jan 23 08:42:19 crc kubenswrapper[4711]: I0123 08:42:19.700540 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-59f56ff984-tx7z9"
Jan 23 08:42:19 crc kubenswrapper[4711]: I0123 08:42:19.810733 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7nnk9\" (UniqueName: \"kubernetes.io/projected/b302d05c-2499-4bf0-a271-29e930bf8c0d-kube-api-access-7nnk9\") pod \"b302d05c-2499-4bf0-a271-29e930bf8c0d\" (UID: \"b302d05c-2499-4bf0-a271-29e930bf8c0d\") "
Jan 23 08:42:19 crc kubenswrapper[4711]: I0123 08:42:19.816131 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b302d05c-2499-4bf0-a271-29e930bf8c0d-kube-api-access-7nnk9" (OuterVolumeSpecName: "kube-api-access-7nnk9") pod "b302d05c-2499-4bf0-a271-29e930bf8c0d" (UID: "b302d05c-2499-4bf0-a271-29e930bf8c0d"). InnerVolumeSpecName "kube-api-access-7nnk9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:42:19 crc kubenswrapper[4711]: I0123 08:42:19.913206 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7nnk9\" (UniqueName: \"kubernetes.io/projected/b302d05c-2499-4bf0-a271-29e930bf8c0d-kube-api-access-7nnk9\") on node \"crc\" DevicePath \"\""
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.090048 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p"
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.178053 4711 generic.go:334] "Generic (PLEG): container finished" podID="20db23eb-eb12-458a-9c9e-164f8e3bcab7" containerID="6b8be6370443e6621f6fbe1adcf9da2e3f2da110ecfd02ba01c2c7d20bbc75a8" exitCode=0
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.178111 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p"
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.178149 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p" event={"ID":"20db23eb-eb12-458a-9c9e-164f8e3bcab7","Type":"ContainerDied","Data":"6b8be6370443e6621f6fbe1adcf9da2e3f2da110ecfd02ba01c2c7d20bbc75a8"}
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.178206 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p" event={"ID":"20db23eb-eb12-458a-9c9e-164f8e3bcab7","Type":"ContainerDied","Data":"06284ce3aff2c83d71a53b02cdda58e4a8409b1df983de179026498e3174c33f"}
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.178229 4711 scope.go:117] "RemoveContainer" containerID="6b8be6370443e6621f6fbe1adcf9da2e3f2da110ecfd02ba01c2c7d20bbc75a8"
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.183832 4711 generic.go:334] "Generic (PLEG): container finished" podID="b302d05c-2499-4bf0-a271-29e930bf8c0d" containerID="d2ccbbe104b0fe806d52327b913d12d3b9aa75d81463ae164030c697def9b341" exitCode=0
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.183892 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-59f56ff984-tx7z9" event={"ID":"b302d05c-2499-4bf0-a271-29e930bf8c0d","Type":"ContainerDied","Data":"d2ccbbe104b0fe806d52327b913d12d3b9aa75d81463ae164030c697def9b341"}
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.183927 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-59f56ff984-tx7z9" event={"ID":"b302d05c-2499-4bf0-a271-29e930bf8c0d","Type":"ContainerDied","Data":"35ac33e007b854449daf2cb00ec9217bf04c93fa53d983d95b5102bb3a04daf3"}
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.183995 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-59f56ff984-tx7z9"
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.198604 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-index-2kvnc"]
Jan 23 08:42:20 crc kubenswrapper[4711]: E0123 08:42:20.199037 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b302d05c-2499-4bf0-a271-29e930bf8c0d" containerName="operator"
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.199052 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b302d05c-2499-4bf0-a271-29e930bf8c0d" containerName="operator"
Jan 23 08:42:20 crc kubenswrapper[4711]: E0123 08:42:20.199086 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20db23eb-eb12-458a-9c9e-164f8e3bcab7" containerName="manager"
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.199094 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="20db23eb-eb12-458a-9c9e-164f8e3bcab7" containerName="manager"
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.199257 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="20db23eb-eb12-458a-9c9e-164f8e3bcab7" containerName="manager"
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.199284 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="b302d05c-2499-4bf0-a271-29e930bf8c0d" containerName="operator"
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.199928 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-index-2kvnc"
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.208373 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-index-dockercfg-fn6cw"
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.208784 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-index-2kvnc"]
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.219355 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lpzpp\" (UniqueName: \"kubernetes.io/projected/20db23eb-eb12-458a-9c9e-164f8e3bcab7-kube-api-access-lpzpp\") pod \"20db23eb-eb12-458a-9c9e-164f8e3bcab7\" (UID: \"20db23eb-eb12-458a-9c9e-164f8e3bcab7\") "
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.230113 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20db23eb-eb12-458a-9c9e-164f8e3bcab7-kube-api-access-lpzpp" (OuterVolumeSpecName: "kube-api-access-lpzpp") pod "20db23eb-eb12-458a-9c9e-164f8e3bcab7" (UID: "20db23eb-eb12-458a-9c9e-164f8e3bcab7"). InnerVolumeSpecName "kube-api-access-lpzpp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.251677 4711 scope.go:117] "RemoveContainer" containerID="6b8be6370443e6621f6fbe1adcf9da2e3f2da110ecfd02ba01c2c7d20bbc75a8"
Jan 23 08:42:20 crc kubenswrapper[4711]: E0123 08:42:20.255824 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b8be6370443e6621f6fbe1adcf9da2e3f2da110ecfd02ba01c2c7d20bbc75a8\": container with ID starting with 6b8be6370443e6621f6fbe1adcf9da2e3f2da110ecfd02ba01c2c7d20bbc75a8 not found: ID does not exist" containerID="6b8be6370443e6621f6fbe1adcf9da2e3f2da110ecfd02ba01c2c7d20bbc75a8"
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.255883 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b8be6370443e6621f6fbe1adcf9da2e3f2da110ecfd02ba01c2c7d20bbc75a8"} err="failed to get container status \"6b8be6370443e6621f6fbe1adcf9da2e3f2da110ecfd02ba01c2c7d20bbc75a8\": rpc error: code = NotFound desc = could not find container \"6b8be6370443e6621f6fbe1adcf9da2e3f2da110ecfd02ba01c2c7d20bbc75a8\": container with ID starting with 6b8be6370443e6621f6fbe1adcf9da2e3f2da110ecfd02ba01c2c7d20bbc75a8 not found: ID does not exist"
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.255916 4711 scope.go:117] "RemoveContainer" containerID="d2ccbbe104b0fe806d52327b913d12d3b9aa75d81463ae164030c697def9b341"
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.281866 4711 scope.go:117] "RemoveContainer" containerID="d2ccbbe104b0fe806d52327b913d12d3b9aa75d81463ae164030c697def9b341"
Jan 23 08:42:20 crc kubenswrapper[4711]: E0123 08:42:20.282407 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2ccbbe104b0fe806d52327b913d12d3b9aa75d81463ae164030c697def9b341\": container with ID starting with d2ccbbe104b0fe806d52327b913d12d3b9aa75d81463ae164030c697def9b341 not found: ID does not exist" containerID="d2ccbbe104b0fe806d52327b913d12d3b9aa75d81463ae164030c697def9b341"
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.282449 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2ccbbe104b0fe806d52327b913d12d3b9aa75d81463ae164030c697def9b341"} err="failed to get container status \"d2ccbbe104b0fe806d52327b913d12d3b9aa75d81463ae164030c697def9b341\": rpc error: code = NotFound desc = could not find container \"d2ccbbe104b0fe806d52327b913d12d3b9aa75d81463ae164030c697def9b341\": container with ID starting with d2ccbbe104b0fe806d52327b913d12d3b9aa75d81463ae164030c697def9b341 not found: ID does not exist"
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.283705 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-controller-init-59f56ff984-tx7z9"]
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.289008 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-controller-init-59f56ff984-tx7z9"]
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.320570 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dg277\" (UniqueName: \"kubernetes.io/projected/1090978f-eecc-4ffc-bb9e-eecbdad1a7e2-kube-api-access-dg277\") pod \"nova-operator-index-2kvnc\" (UID: \"1090978f-eecc-4ffc-bb9e-eecbdad1a7e2\") " pod="openstack-operators/nova-operator-index-2kvnc"
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.320753 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lpzpp\" (UniqueName: \"kubernetes.io/projected/20db23eb-eb12-458a-9c9e-164f8e3bcab7-kube-api-access-lpzpp\") on node \"crc\" DevicePath \"\""
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.421604 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dg277\" (UniqueName: \"kubernetes.io/projected/1090978f-eecc-4ffc-bb9e-eecbdad1a7e2-kube-api-access-dg277\") pod \"nova-operator-index-2kvnc\" (UID: \"1090978f-eecc-4ffc-bb9e-eecbdad1a7e2\") " pod="openstack-operators/nova-operator-index-2kvnc"
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.446449 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dg277\" (UniqueName: \"kubernetes.io/projected/1090978f-eecc-4ffc-bb9e-eecbdad1a7e2-kube-api-access-dg277\") pod \"nova-operator-index-2kvnc\" (UID: \"1090978f-eecc-4ffc-bb9e-eecbdad1a7e2\") " pod="openstack-operators/nova-operator-index-2kvnc"
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.507205 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p"]
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.514015 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5db5449586-d5x8p"]
Jan 23 08:42:20 crc kubenswrapper[4711]: I0123 08:42:20.562703 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-index-2kvnc"
Jan 23 08:42:21 crc kubenswrapper[4711]: I0123 08:42:21.330950 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-index-2kvnc"]
Jan 23 08:42:21 crc kubenswrapper[4711]: W0123 08:42:21.347735 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1090978f_eecc_4ffc_bb9e_eecbdad1a7e2.slice/crio-8ffdacc5b8a95b6b69f52d57c20032158eb4538ffa8c35992e41e5ca367747b4 WatchSource:0}: Error finding container 8ffdacc5b8a95b6b69f52d57c20032158eb4538ffa8c35992e41e5ca367747b4: Status 404 returned error can't find the container with id 8ffdacc5b8a95b6b69f52d57c20032158eb4538ffa8c35992e41e5ca367747b4
Jan 23 08:42:21 crc kubenswrapper[4711]: I0123 08:42:21.486810 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20db23eb-eb12-458a-9c9e-164f8e3bcab7" path="/var/lib/kubelet/pods/20db23eb-eb12-458a-9c9e-164f8e3bcab7/volumes"
Jan 23 08:42:21 crc kubenswrapper[4711]: I0123 08:42:21.487483 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b302d05c-2499-4bf0-a271-29e930bf8c0d" path="/var/lib/kubelet/pods/b302d05c-2499-4bf0-a271-29e930bf8c0d/volumes"
Jan 23 08:42:22 crc kubenswrapper[4711]: I0123 08:42:22.201909 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-index-2kvnc" event={"ID":"1090978f-eecc-4ffc-bb9e-eecbdad1a7e2","Type":"ContainerStarted","Data":"7333a77ce918c2fc87673ab5947f2df3bc85bae57b3a066dc958f13574076e04"}
Jan 23 08:42:22 crc kubenswrapper[4711]: I0123 08:42:22.202208 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-index-2kvnc" event={"ID":"1090978f-eecc-4ffc-bb9e-eecbdad1a7e2","Type":"ContainerStarted","Data":"8ffdacc5b8a95b6b69f52d57c20032158eb4538ffa8c35992e41e5ca367747b4"}
Jan 23 08:42:22 crc kubenswrapper[4711]: I0123 08:42:22.220783 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-index-2kvnc" podStartSLOduration=2.031432947 podStartE2EDuration="2.220760674s" podCreationTimestamp="2026-01-23 08:42:20 +0000 UTC" firstStartedPulling="2026-01-23 08:42:21.350358105 +0000 UTC m=+1326.923314473" lastFinishedPulling="2026-01-23 08:42:21.539685832 +0000 UTC m=+1327.112642200" observedRunningTime="2026-01-23 08:42:22.219681757 +0000 UTC m=+1327.792638125" watchObservedRunningTime="2026-01-23 08:42:22.220760674 +0000 UTC m=+1327.793717042"
Jan 23 08:42:22 crc kubenswrapper[4711]: I0123 08:42:22.811081 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/nova-operator-index-2kvnc"]
Jan 23 08:42:23 crc kubenswrapper[4711]: I0123 08:42:23.231630 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-index-tm9mb"]
Jan 23 08:42:23 crc kubenswrapper[4711]: I0123 08:42:23.251582 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-index-tm9mb"
Jan 23 08:42:23 crc kubenswrapper[4711]: I0123 08:42:23.270093 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbv4g\" (UniqueName: \"kubernetes.io/projected/0bbe37a8-f874-4b6b-8f5d-35c1f4a463cc-kube-api-access-jbv4g\") pod \"nova-operator-index-tm9mb\" (UID: \"0bbe37a8-f874-4b6b-8f5d-35c1f4a463cc\") " pod="openstack-operators/nova-operator-index-tm9mb"
Jan 23 08:42:23 crc kubenswrapper[4711]: I0123 08:42:23.282445 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-index-tm9mb"]
Jan 23 08:42:23 crc kubenswrapper[4711]: I0123 08:42:23.373023 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbv4g\" (UniqueName: \"kubernetes.io/projected/0bbe37a8-f874-4b6b-8f5d-35c1f4a463cc-kube-api-access-jbv4g\") pod \"nova-operator-index-tm9mb\" (UID: \"0bbe37a8-f874-4b6b-8f5d-35c1f4a463cc\") " pod="openstack-operators/nova-operator-index-tm9mb"
Jan 23 08:42:23 crc kubenswrapper[4711]: I0123 08:42:23.394890 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbv4g\" (UniqueName: \"kubernetes.io/projected/0bbe37a8-f874-4b6b-8f5d-35c1f4a463cc-kube-api-access-jbv4g\") pod \"nova-operator-index-tm9mb\" (UID: \"0bbe37a8-f874-4b6b-8f5d-35c1f4a463cc\") " pod="openstack-operators/nova-operator-index-tm9mb"
Jan 23 08:42:23 crc kubenswrapper[4711]: I0123 08:42:23.614197 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-index-tm9mb"
Jan 23 08:42:24 crc kubenswrapper[4711]: I0123 08:42:24.059860 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-index-tm9mb"]
Jan 23 08:42:24 crc kubenswrapper[4711]: I0123 08:42:24.216382 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-index-tm9mb" event={"ID":"0bbe37a8-f874-4b6b-8f5d-35c1f4a463cc","Type":"ContainerStarted","Data":"9b270b2f38578df3cb8b6fa45134d7821f1acba2dfcb814549c21d4d43961306"}
Jan 23 08:42:24 crc kubenswrapper[4711]: I0123 08:42:24.216526 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/nova-operator-index-2kvnc" podUID="1090978f-eecc-4ffc-bb9e-eecbdad1a7e2" containerName="registry-server" containerID="cri-o://7333a77ce918c2fc87673ab5947f2df3bc85bae57b3a066dc958f13574076e04" gracePeriod=2
Jan 23 08:42:24 crc kubenswrapper[4711]: I0123 08:42:24.583186 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-index-2kvnc"
Jan 23 08:42:24 crc kubenswrapper[4711]: I0123 08:42:24.593834 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dg277\" (UniqueName: \"kubernetes.io/projected/1090978f-eecc-4ffc-bb9e-eecbdad1a7e2-kube-api-access-dg277\") pod \"1090978f-eecc-4ffc-bb9e-eecbdad1a7e2\" (UID: \"1090978f-eecc-4ffc-bb9e-eecbdad1a7e2\") "
Jan 23 08:42:24 crc kubenswrapper[4711]: I0123 08:42:24.601098 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1090978f-eecc-4ffc-bb9e-eecbdad1a7e2-kube-api-access-dg277" (OuterVolumeSpecName: "kube-api-access-dg277") pod "1090978f-eecc-4ffc-bb9e-eecbdad1a7e2" (UID: "1090978f-eecc-4ffc-bb9e-eecbdad1a7e2"). InnerVolumeSpecName "kube-api-access-dg277". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:42:24 crc kubenswrapper[4711]: I0123 08:42:24.695637 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dg277\" (UniqueName: \"kubernetes.io/projected/1090978f-eecc-4ffc-bb9e-eecbdad1a7e2-kube-api-access-dg277\") on node \"crc\" DevicePath \"\""
Jan 23 08:42:25 crc kubenswrapper[4711]: I0123 08:42:25.223759 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-index-tm9mb" event={"ID":"0bbe37a8-f874-4b6b-8f5d-35c1f4a463cc","Type":"ContainerStarted","Data":"454cff874519bd1c60928c81e3144624ac6223e4d89f80c9ce7b06ecc1c4ff96"}
Jan 23 08:42:25 crc kubenswrapper[4711]: I0123 08:42:25.225598 4711 generic.go:334] "Generic (PLEG): container finished" podID="1090978f-eecc-4ffc-bb9e-eecbdad1a7e2" containerID="7333a77ce918c2fc87673ab5947f2df3bc85bae57b3a066dc958f13574076e04" exitCode=0
Jan 23 08:42:25 crc kubenswrapper[4711]: I0123 08:42:25.225630 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-index-2kvnc" event={"ID":"1090978f-eecc-4ffc-bb9e-eecbdad1a7e2","Type":"ContainerDied","Data":"7333a77ce918c2fc87673ab5947f2df3bc85bae57b3a066dc958f13574076e04"}
Jan 23 08:42:25 crc kubenswrapper[4711]: I0123 08:42:25.225648 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-index-2kvnc" event={"ID":"1090978f-eecc-4ffc-bb9e-eecbdad1a7e2","Type":"ContainerDied","Data":"8ffdacc5b8a95b6b69f52d57c20032158eb4538ffa8c35992e41e5ca367747b4"}
Jan 23 08:42:25 crc kubenswrapper[4711]: I0123 08:42:25.225666 4711 scope.go:117] "RemoveContainer" containerID="7333a77ce918c2fc87673ab5947f2df3bc85bae57b3a066dc958f13574076e04"
Jan 23 08:42:25 crc kubenswrapper[4711]: I0123 08:42:25.225779 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-index-2kvnc"
Jan 23 08:42:25 crc kubenswrapper[4711]: I0123 08:42:25.246090 4711 scope.go:117] "RemoveContainer" containerID="7333a77ce918c2fc87673ab5947f2df3bc85bae57b3a066dc958f13574076e04"
Jan 23 08:42:25 crc kubenswrapper[4711]: E0123 08:42:25.246534 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7333a77ce918c2fc87673ab5947f2df3bc85bae57b3a066dc958f13574076e04\": container with ID starting with 7333a77ce918c2fc87673ab5947f2df3bc85bae57b3a066dc958f13574076e04 not found: ID does not exist" containerID="7333a77ce918c2fc87673ab5947f2df3bc85bae57b3a066dc958f13574076e04"
Jan 23 08:42:25 crc kubenswrapper[4711]: I0123 08:42:25.246592 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7333a77ce918c2fc87673ab5947f2df3bc85bae57b3a066dc958f13574076e04"} err="failed to get container status \"7333a77ce918c2fc87673ab5947f2df3bc85bae57b3a066dc958f13574076e04\": rpc error: code = NotFound desc = could not find container \"7333a77ce918c2fc87673ab5947f2df3bc85bae57b3a066dc958f13574076e04\": container with ID starting with 7333a77ce918c2fc87673ab5947f2df3bc85bae57b3a066dc958f13574076e04 not found: ID does not exist"
Jan 23 08:42:25 crc kubenswrapper[4711]: I0123 08:42:25.260858 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-index-tm9mb" podStartSLOduration=2.187098834 podStartE2EDuration="2.260837605s" podCreationTimestamp="2026-01-23 08:42:23 +0000 UTC" firstStartedPulling="2026-01-23 08:42:24.068089834 +0000 UTC m=+1329.641046202" lastFinishedPulling="2026-01-23 08:42:24.141828605 +0000 UTC m=+1329.714784973" observedRunningTime="2026-01-23 08:42:25.244241145 +0000 UTC m=+1330.817197513" watchObservedRunningTime="2026-01-23 08:42:25.260837605 +0000 UTC m=+1330.833793973"
Jan 23 08:42:25 crc kubenswrapper[4711]: I0123 08:42:25.262832 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/nova-operator-index-2kvnc"]
Jan 23 08:42:25 crc kubenswrapper[4711]: I0123 08:42:25.268807 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/nova-operator-index-2kvnc"]
Jan 23 08:42:25 crc kubenswrapper[4711]: I0123 08:42:25.482336 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1090978f-eecc-4ffc-bb9e-eecbdad1a7e2" path="/var/lib/kubelet/pods/1090978f-eecc-4ffc-bb9e-eecbdad1a7e2/volumes"
Jan 23 08:42:33 crc kubenswrapper[4711]: I0123 08:42:33.614986 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/nova-operator-index-tm9mb"
Jan 23 08:42:33 crc kubenswrapper[4711]: I0123 08:42:33.615562 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-index-tm9mb"
Jan 23 08:42:33 crc kubenswrapper[4711]: I0123 08:42:33.645848 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/nova-operator-index-tm9mb"
Jan 23 08:42:34 crc kubenswrapper[4711]: I0123 08:42:34.321080 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-index-tm9mb"
Jan 23 08:42:42 crc kubenswrapper[4711]: I0123 08:42:42.664939 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8"]
Jan 23 08:42:42 crc kubenswrapper[4711]: E0123 08:42:42.665959 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1090978f-eecc-4ffc-bb9e-eecbdad1a7e2" containerName="registry-server"
Jan 23 08:42:42 crc kubenswrapper[4711]: I0123 08:42:42.665981 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="1090978f-eecc-4ffc-bb9e-eecbdad1a7e2" containerName="registry-server"
Jan 23 08:42:42 crc kubenswrapper[4711]: I0123 08:42:42.666296 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="1090978f-eecc-4ffc-bb9e-eecbdad1a7e2" containerName="registry-server"
Jan 23 08:42:42 crc kubenswrapper[4711]: I0123 08:42:42.668085 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8"
Jan 23 08:42:42 crc kubenswrapper[4711]: I0123 08:42:42.673105 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-n282r"
Jan 23 08:42:42 crc kubenswrapper[4711]: I0123 08:42:42.684437 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8"]
Jan 23 08:42:42 crc kubenswrapper[4711]: I0123 08:42:42.709899 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ee296785-739b-4103-84c5-ab2fe24f3a7c-bundle\") pod \"fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8\" (UID: \"ee296785-739b-4103-84c5-ab2fe24f3a7c\") " pod="openstack-operators/fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8"
Jan 23 08:42:42 crc kubenswrapper[4711]: I0123 08:42:42.709971 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ee296785-739b-4103-84c5-ab2fe24f3a7c-util\") pod \"fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8\" (UID: \"ee296785-739b-4103-84c5-ab2fe24f3a7c\") " pod="openstack-operators/fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8"
Jan 23 08:42:42 crc kubenswrapper[4711]: I0123 08:42:42.709998 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmh9p\" (UniqueName: \"kubernetes.io/projected/ee296785-739b-4103-84c5-ab2fe24f3a7c-kube-api-access-lmh9p\") pod \"fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8\" (UID: \"ee296785-739b-4103-84c5-ab2fe24f3a7c\") " pod="openstack-operators/fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8"
Jan 23 08:42:42 crc kubenswrapper[4711]: I0123 08:42:42.813538 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ee296785-739b-4103-84c5-ab2fe24f3a7c-util\") pod \"fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8\" (UID: \"ee296785-739b-4103-84c5-ab2fe24f3a7c\") " pod="openstack-operators/fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8"
Jan 23 08:42:42 crc kubenswrapper[4711]: I0123 08:42:42.813647 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmh9p\" (UniqueName: \"kubernetes.io/projected/ee296785-739b-4103-84c5-ab2fe24f3a7c-kube-api-access-lmh9p\") pod \"fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8\" (UID: \"ee296785-739b-4103-84c5-ab2fe24f3a7c\") " pod="openstack-operators/fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8"
Jan 23 08:42:42 crc kubenswrapper[4711]: I0123 08:42:42.813903 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ee296785-739b-4103-84c5-ab2fe24f3a7c-bundle\") pod \"fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8\" (UID: \"ee296785-739b-4103-84c5-ab2fe24f3a7c\") " pod="openstack-operators/fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8"
Jan 23 08:42:42 crc kubenswrapper[4711]: I0123 08:42:42.814157 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ee296785-739b-4103-84c5-ab2fe24f3a7c-util\") pod \"fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8\" (UID: \"ee296785-739b-4103-84c5-ab2fe24f3a7c\") " pod="openstack-operators/fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8"
Jan 23 08:42:42 crc kubenswrapper[4711]: I0123 08:42:42.814642 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ee296785-739b-4103-84c5-ab2fe24f3a7c-bundle\") pod \"fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8\" (UID: \"ee296785-739b-4103-84c5-ab2fe24f3a7c\") " pod="openstack-operators/fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8"
Jan 23 08:42:42 crc kubenswrapper[4711]: I0123 08:42:42.835462 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmh9p\" (UniqueName: \"kubernetes.io/projected/ee296785-739b-4103-84c5-ab2fe24f3a7c-kube-api-access-lmh9p\") pod \"fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8\" (UID: \"ee296785-739b-4103-84c5-ab2fe24f3a7c\") " pod="openstack-operators/fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8"
Jan 23 08:42:42 crc kubenswrapper[4711]: I0123 08:42:42.987779 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8"
Jan 23 08:42:43 crc kubenswrapper[4711]: I0123 08:42:43.432143 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8"]
Jan 23 08:42:44 crc kubenswrapper[4711]: I0123 08:42:44.362165 4711 generic.go:334] "Generic (PLEG): container finished" podID="ee296785-739b-4103-84c5-ab2fe24f3a7c" containerID="cccf0d8f2979da29f850c6ec467e009d5e4bd0765539f3886249bc8fa11654bb" exitCode=0
Jan 23 08:42:44 crc kubenswrapper[4711]: I0123 08:42:44.362238 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8" event={"ID":"ee296785-739b-4103-84c5-ab2fe24f3a7c","Type":"ContainerDied","Data":"cccf0d8f2979da29f850c6ec467e009d5e4bd0765539f3886249bc8fa11654bb"}
Jan 23 08:42:44 crc kubenswrapper[4711]: I0123 08:42:44.362488 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8" event={"ID":"ee296785-739b-4103-84c5-ab2fe24f3a7c","Type":"ContainerStarted","Data":"81d2f03bc57086dd8870620ba8333bd980656964f9a9543b833d0fa0608abc5e"}
Jan 23 08:42:44 crc kubenswrapper[4711]: I0123 08:42:44.364801 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 23 08:42:45 crc kubenswrapper[4711]: I0123 08:42:45.373640 4711 generic.go:334] "Generic (PLEG): container finished" podID="ee296785-739b-4103-84c5-ab2fe24f3a7c" containerID="b9e6893a1412fb5a2e9c38fd3692eefed748b6be336e9b6f62c0e3bdda245595" exitCode=0
Jan 23 08:42:45 crc kubenswrapper[4711]: I0123 08:42:45.373725 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8" event={"ID":"ee296785-739b-4103-84c5-ab2fe24f3a7c","Type":"ContainerDied","Data":"b9e6893a1412fb5a2e9c38fd3692eefed748b6be336e9b6f62c0e3bdda245595"}
Jan 23 08:42:46 crc kubenswrapper[4711]: I0123 08:42:46.382799 4711 generic.go:334] "Generic (PLEG): container finished" podID="ee296785-739b-4103-84c5-ab2fe24f3a7c" containerID="d58993d9cad85e640cedc1f895487caafbfbf2c1b526d6c170031e03ad2efc88" exitCode=0
Jan 23 08:42:46 crc kubenswrapper[4711]: I0123 08:42:46.382878 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8" event={"ID":"ee296785-739b-4103-84c5-ab2fe24f3a7c","Type":"ContainerDied","Data":"d58993d9cad85e640cedc1f895487caafbfbf2c1b526d6c170031e03ad2efc88"}
Jan 23 08:42:47 crc kubenswrapper[4711]: I0123 08:42:47.693033 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8"
Jan 23 08:42:47 crc kubenswrapper[4711]: I0123 08:42:47.803202 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmh9p\" (UniqueName: \"kubernetes.io/projected/ee296785-739b-4103-84c5-ab2fe24f3a7c-kube-api-access-lmh9p\") pod \"ee296785-739b-4103-84c5-ab2fe24f3a7c\" (UID: \"ee296785-739b-4103-84c5-ab2fe24f3a7c\") "
Jan 23 08:42:47 crc kubenswrapper[4711]: I0123 08:42:47.803318 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ee296785-739b-4103-84c5-ab2fe24f3a7c-util\") pod \"ee296785-739b-4103-84c5-ab2fe24f3a7c\" (UID: \"ee296785-739b-4103-84c5-ab2fe24f3a7c\") "
Jan 23 08:42:47 crc kubenswrapper[4711]: I0123 08:42:47.803342 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ee296785-739b-4103-84c5-ab2fe24f3a7c-bundle\") pod \"ee296785-739b-4103-84c5-ab2fe24f3a7c\" (UID: \"ee296785-739b-4103-84c5-ab2fe24f3a7c\") "
Jan 23 08:42:47 crc kubenswrapper[4711]: I0123 08:42:47.805286 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee296785-739b-4103-84c5-ab2fe24f3a7c-bundle" (OuterVolumeSpecName: "bundle") pod "ee296785-739b-4103-84c5-ab2fe24f3a7c" (UID: "ee296785-739b-4103-84c5-ab2fe24f3a7c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:42:47 crc kubenswrapper[4711]: I0123 08:42:47.809833 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee296785-739b-4103-84c5-ab2fe24f3a7c-kube-api-access-lmh9p" (OuterVolumeSpecName: "kube-api-access-lmh9p") pod "ee296785-739b-4103-84c5-ab2fe24f3a7c" (UID: "ee296785-739b-4103-84c5-ab2fe24f3a7c"). InnerVolumeSpecName "kube-api-access-lmh9p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:42:47 crc kubenswrapper[4711]: I0123 08:42:47.821370 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee296785-739b-4103-84c5-ab2fe24f3a7c-util" (OuterVolumeSpecName: "util") pod "ee296785-739b-4103-84c5-ab2fe24f3a7c" (UID: "ee296785-739b-4103-84c5-ab2fe24f3a7c"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:42:47 crc kubenswrapper[4711]: I0123 08:42:47.905536 4711 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ee296785-739b-4103-84c5-ab2fe24f3a7c-util\") on node \"crc\" DevicePath \"\""
Jan 23 08:42:47 crc kubenswrapper[4711]: I0123 08:42:47.905961 4711 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ee296785-739b-4103-84c5-ab2fe24f3a7c-bundle\") on node \"crc\" DevicePath \"\""
Jan 23 08:42:47 crc kubenswrapper[4711]: I0123 08:42:47.906124 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmh9p\" (UniqueName: \"kubernetes.io/projected/ee296785-739b-4103-84c5-ab2fe24f3a7c-kube-api-access-lmh9p\") on node \"crc\" DevicePath \"\""
Jan 23 08:42:48 crc kubenswrapper[4711]: I0123 08:42:48.399543 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8" event={"ID":"ee296785-739b-4103-84c5-ab2fe24f3a7c","Type":"ContainerDied","Data":"81d2f03bc57086dd8870620ba8333bd980656964f9a9543b833d0fa0608abc5e"}
Jan 23 08:42:48 crc kubenswrapper[4711]: I0123 08:42:48.399595 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="81d2f03bc57086dd8870620ba8333bd980656964f9a9543b833d0fa0608abc5e"
Jan 23 08:42:48 crc kubenswrapper[4711]: I0123 08:42:48.399739 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8"
Jan 23 08:42:54 crc kubenswrapper[4711]: I0123 08:42:54.185232 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-6b684b99ff-stnvz"]
Jan 23 08:42:54 crc kubenswrapper[4711]: E0123 08:42:54.185801 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee296785-739b-4103-84c5-ab2fe24f3a7c" containerName="pull"
Jan 23 08:42:54 crc kubenswrapper[4711]: I0123 08:42:54.185814 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee296785-739b-4103-84c5-ab2fe24f3a7c" containerName="pull"
Jan 23 08:42:54 crc kubenswrapper[4711]: E0123 08:42:54.185829 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee296785-739b-4103-84c5-ab2fe24f3a7c" containerName="extract"
Jan 23 08:42:54 crc kubenswrapper[4711]: I0123 08:42:54.185835 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee296785-739b-4103-84c5-ab2fe24f3a7c" containerName="extract"
Jan 23 08:42:54 crc kubenswrapper[4711]: E0123 08:42:54.185851 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee296785-739b-4103-84c5-ab2fe24f3a7c" containerName="util"
Jan 23 08:42:54 crc kubenswrapper[4711]: I0123 08:42:54.185859 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee296785-739b-4103-84c5-ab2fe24f3a7c" containerName="util"
Jan 23 08:42:54 crc kubenswrapper[4711]: I0123 08:42:54.185989 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee296785-739b-4103-84c5-ab2fe24f3a7c" containerName="extract"
Jan 23 08:42:54 crc kubenswrapper[4711]: I0123 08:42:54.186451 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-6b684b99ff-stnvz"
Jan 23 08:42:54 crc kubenswrapper[4711]: I0123 08:42:54.188932 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-4nnzp"
Jan 23 08:42:54 crc kubenswrapper[4711]: I0123 08:42:54.189175 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-service-cert"
Jan 23 08:42:54 crc kubenswrapper[4711]: I0123 08:42:54.205657 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-6b684b99ff-stnvz"]
Jan 23 08:42:54 crc kubenswrapper[4711]: I0123 08:42:54.346251 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c9735021-bb7c-43ac-b95e-a4fd2d26c84a-webhook-cert\") pod \"nova-operator-controller-manager-6b684b99ff-stnvz\" (UID: \"c9735021-bb7c-43ac-b95e-a4fd2d26c84a\") " pod="openstack-operators/nova-operator-controller-manager-6b684b99ff-stnvz"
Jan 23 08:42:54 crc kubenswrapper[4711]: I0123 08:42:54.346373 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c9735021-bb7c-43ac-b95e-a4fd2d26c84a-apiservice-cert\") pod \"nova-operator-controller-manager-6b684b99ff-stnvz\" (UID: \"c9735021-bb7c-43ac-b95e-a4fd2d26c84a\") " pod="openstack-operators/nova-operator-controller-manager-6b684b99ff-stnvz"
Jan 23 08:42:54 crc kubenswrapper[4711]: I0123 08:42:54.346423 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvdcg\" (UniqueName: \"kubernetes.io/projected/c9735021-bb7c-43ac-b95e-a4fd2d26c84a-kube-api-access-rvdcg\") pod \"nova-operator-controller-manager-6b684b99ff-stnvz\" (UID: \"c9735021-bb7c-43ac-b95e-a4fd2d26c84a\") " pod="openstack-operators/nova-operator-controller-manager-6b684b99ff-stnvz"
Jan 23 08:42:54 crc kubenswrapper[4711]: I0123 08:42:54.447353 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c9735021-bb7c-43ac-b95e-a4fd2d26c84a-webhook-cert\") pod \"nova-operator-controller-manager-6b684b99ff-stnvz\" (UID: \"c9735021-bb7c-43ac-b95e-a4fd2d26c84a\") " pod="openstack-operators/nova-operator-controller-manager-6b684b99ff-stnvz"
Jan 23 08:42:54 crc kubenswrapper[4711]: I0123 08:42:54.447421 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c9735021-bb7c-43ac-b95e-a4fd2d26c84a-apiservice-cert\") pod \"nova-operator-controller-manager-6b684b99ff-stnvz\" (UID: \"c9735021-bb7c-43ac-b95e-a4fd2d26c84a\") " pod="openstack-operators/nova-operator-controller-manager-6b684b99ff-stnvz"
Jan 23 08:42:54 crc kubenswrapper[4711]: I0123 08:42:54.447454 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvdcg\" (UniqueName: \"kubernetes.io/projected/c9735021-bb7c-43ac-b95e-a4fd2d26c84a-kube-api-access-rvdcg\") pod \"nova-operator-controller-manager-6b684b99ff-stnvz\" (UID: \"c9735021-bb7c-43ac-b95e-a4fd2d26c84a\") " pod="openstack-operators/nova-operator-controller-manager-6b684b99ff-stnvz"
Jan 23 08:42:54 crc kubenswrapper[4711]: I0123 08:42:54.464378 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c9735021-bb7c-43ac-b95e-a4fd2d26c84a-apiservice-cert\") pod \"nova-operator-controller-manager-6b684b99ff-stnvz\" (UID: \"c9735021-bb7c-43ac-b95e-a4fd2d26c84a\") " pod="openstack-operators/nova-operator-controller-manager-6b684b99ff-stnvz" Jan 23 08:42:54 crc kubenswrapper[4711]: I0123 08:42:54.467914 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c9735021-bb7c-43ac-b95e-a4fd2d26c84a-webhook-cert\") pod \"nova-operator-controller-manager-6b684b99ff-stnvz\" (UID: \"c9735021-bb7c-43ac-b95e-a4fd2d26c84a\") " pod="openstack-operators/nova-operator-controller-manager-6b684b99ff-stnvz" Jan 23 08:42:54 crc kubenswrapper[4711]: I0123 08:42:54.468663 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvdcg\" (UniqueName: \"kubernetes.io/projected/c9735021-bb7c-43ac-b95e-a4fd2d26c84a-kube-api-access-rvdcg\") pod \"nova-operator-controller-manager-6b684b99ff-stnvz\" (UID: \"c9735021-bb7c-43ac-b95e-a4fd2d26c84a\") " pod="openstack-operators/nova-operator-controller-manager-6b684b99ff-stnvz" Jan 23 08:42:54 crc kubenswrapper[4711]: I0123 08:42:54.507294 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-6b684b99ff-stnvz" Jan 23 08:42:55 crc kubenswrapper[4711]: I0123 08:42:55.359408 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-6b684b99ff-stnvz"] Jan 23 08:42:55 crc kubenswrapper[4711]: I0123 08:42:55.460092 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-6b684b99ff-stnvz" event={"ID":"c9735021-bb7c-43ac-b95e-a4fd2d26c84a","Type":"ContainerStarted","Data":"25f037579c920612750b117d48a0c6978ca0b036c95a0bc648ee648dcebbe0af"} Jan 23 08:42:56 crc kubenswrapper[4711]: I0123 08:42:56.471695 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-6b684b99ff-stnvz" event={"ID":"c9735021-bb7c-43ac-b95e-a4fd2d26c84a","Type":"ContainerStarted","Data":"0c2b0cae7b73bd21fd32c50a8e61f3edeac6c10ed0b2c87c15ee42db066125a4"} Jan 23 08:42:56 crc kubenswrapper[4711]: I0123 08:42:56.473265 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-6b684b99ff-stnvz" Jan 23 08:43:04 crc kubenswrapper[4711]: I0123 08:43:04.511620 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-6b684b99ff-stnvz" Jan 23 08:43:04 crc kubenswrapper[4711]: I0123 08:43:04.549358 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-6b684b99ff-stnvz" podStartSLOduration=10.549341026 podStartE2EDuration="10.549341026s" podCreationTimestamp="2026-01-23 08:42:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:42:56.493891196 +0000 UTC m=+1362.066847564" watchObservedRunningTime="2026-01-23 08:43:04.549341026 +0000 UTC m=+1370.122297394" Jan 23 08:43:33 crc kubenswrapper[4711]: I0123 08:43:33.930670 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-api-db-create-v84r6"] Jan 23 08:43:33 crc kubenswrapper[4711]: I0123 08:43:33.932131 4711 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-api-db-create-v84r6" Jan 23 08:43:33 crc kubenswrapper[4711]: I0123 08:43:33.939601 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-api-db-create-v84r6"] Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.027822 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-cell0-db-create-spdnp"] Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.029883 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-db-create-spdnp" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.034799 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell0-db-create-spdnp"] Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.072456 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4382f53-4838-48fd-b802-b0912baaac04-operator-scripts\") pod \"nova-api-db-create-v84r6\" (UID: \"f4382f53-4838-48fd-b802-b0912baaac04\") " pod="nova-kuttl-default/nova-api-db-create-v84r6" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.072551 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fdpb\" (UniqueName: \"kubernetes.io/projected/f4382f53-4838-48fd-b802-b0912baaac04-kube-api-access-9fdpb\") pod \"nova-api-db-create-v84r6\" (UID: \"f4382f53-4838-48fd-b802-b0912baaac04\") " pod="nova-kuttl-default/nova-api-db-create-v84r6" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.139291 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-api-9e56-account-create-update-xfj2b"] Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.140536 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-api-9e56-account-create-update-xfj2b" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.142251 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-api-db-secret" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.157059 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-api-9e56-account-create-update-xfj2b"] Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.173922 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fdpb\" (UniqueName: \"kubernetes.io/projected/f4382f53-4838-48fd-b802-b0912baaac04-kube-api-access-9fdpb\") pod \"nova-api-db-create-v84r6\" (UID: \"f4382f53-4838-48fd-b802-b0912baaac04\") " pod="nova-kuttl-default/nova-api-db-create-v84r6" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.174025 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8-operator-scripts\") pod \"nova-cell0-db-create-spdnp\" (UID: \"62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8\") " pod="nova-kuttl-default/nova-cell0-db-create-spdnp" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.174058 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgcs4\" (UniqueName: \"kubernetes.io/projected/62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8-kube-api-access-qgcs4\") pod \"nova-cell0-db-create-spdnp\" (UID: \"62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8\") " pod="nova-kuttl-default/nova-cell0-db-create-spdnp" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.174086 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4382f53-4838-48fd-b802-b0912baaac04-operator-scripts\") pod \"nova-api-db-create-v84r6\" (UID: \"f4382f53-4838-48fd-b802-b0912baaac04\") " pod="nova-kuttl-default/nova-api-db-create-v84r6" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.174846 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4382f53-4838-48fd-b802-b0912baaac04-operator-scripts\") pod \"nova-api-db-create-v84r6\" (UID: \"f4382f53-4838-48fd-b802-b0912baaac04\") " pod="nova-kuttl-default/nova-api-db-create-v84r6" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.198402 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fdpb\" (UniqueName: \"kubernetes.io/projected/f4382f53-4838-48fd-b802-b0912baaac04-kube-api-access-9fdpb\") pod \"nova-api-db-create-v84r6\" (UID: \"f4382f53-4838-48fd-b802-b0912baaac04\") " pod="nova-kuttl-default/nova-api-db-create-v84r6" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.228389 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-cell1-db-create-hxfhf"] Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.229534 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-db-create-hxfhf" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.238452 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell1-db-create-hxfhf"] Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.250180 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-api-db-create-v84r6" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.275882 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9662d64-6770-40c9-82c4-787eced67f4d-operator-scripts\") pod \"nova-cell1-db-create-hxfhf\" (UID: \"c9662d64-6770-40c9-82c4-787eced67f4d\") " pod="nova-kuttl-default/nova-cell1-db-create-hxfhf" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.276218 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxq22\" (UniqueName: \"kubernetes.io/projected/c9662d64-6770-40c9-82c4-787eced67f4d-kube-api-access-gxq22\") pod \"nova-cell1-db-create-hxfhf\" (UID: \"c9662d64-6770-40c9-82c4-787eced67f4d\") " pod="nova-kuttl-default/nova-cell1-db-create-hxfhf" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.276568 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lr47b\" (UniqueName: \"kubernetes.io/projected/e92d7a9b-a014-4599-b179-1aaecf94ef42-kube-api-access-lr47b\") pod \"nova-api-9e56-account-create-update-xfj2b\" (UID: \"e92d7a9b-a014-4599-b179-1aaecf94ef42\") " pod="nova-kuttl-default/nova-api-9e56-account-create-update-xfj2b" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.276749 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e92d7a9b-a014-4599-b179-1aaecf94ef42-operator-scripts\") pod \"nova-api-9e56-account-create-update-xfj2b\" (UID: \"e92d7a9b-a014-4599-b179-1aaecf94ef42\") " pod="nova-kuttl-default/nova-api-9e56-account-create-update-xfj2b" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.276922 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8-operator-scripts\") pod \"nova-cell0-db-create-spdnp\" (UID: \"62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8\") " pod="nova-kuttl-default/nova-cell0-db-create-spdnp" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.277052 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgcs4\" (UniqueName: \"kubernetes.io/projected/62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8-kube-api-access-qgcs4\") pod \"nova-cell0-db-create-spdnp\" (UID: \"62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8\") " pod="nova-kuttl-default/nova-cell0-db-create-spdnp" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.277893 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8-operator-scripts\") pod \"nova-cell0-db-create-spdnp\" (UID: \"62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8\") " pod="nova-kuttl-default/nova-cell0-db-create-spdnp" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.300154 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgcs4\" (UniqueName: \"kubernetes.io/projected/62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8-kube-api-access-qgcs4\") pod \"nova-cell0-db-create-spdnp\" (UID: \"62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8\") " pod="nova-kuttl-default/nova-cell0-db-create-spdnp" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.337335 4711 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["nova-kuttl-default/nova-cell0-dab5-account-create-update-pvx6d"] Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.338618 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-dab5-account-create-update-pvx6d" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.342272 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-cell0-db-secret" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.359475 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-db-create-spdnp" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.360429 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell0-dab5-account-create-update-pvx6d"] Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.379712 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9662d64-6770-40c9-82c4-787eced67f4d-operator-scripts\") pod \"nova-cell1-db-create-hxfhf\" (UID: \"c9662d64-6770-40c9-82c4-787eced67f4d\") " pod="nova-kuttl-default/nova-cell1-db-create-hxfhf" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.379757 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxq22\" (UniqueName: \"kubernetes.io/projected/c9662d64-6770-40c9-82c4-787eced67f4d-kube-api-access-gxq22\") pod \"nova-cell1-db-create-hxfhf\" (UID: \"c9662d64-6770-40c9-82c4-787eced67f4d\") " pod="nova-kuttl-default/nova-cell1-db-create-hxfhf" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.379809 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lr47b\" (UniqueName: \"kubernetes.io/projected/e92d7a9b-a014-4599-b179-1aaecf94ef42-kube-api-access-lr47b\") pod \"nova-api-9e56-account-create-update-xfj2b\" (UID: \"e92d7a9b-a014-4599-b179-1aaecf94ef42\") " pod="nova-kuttl-default/nova-api-9e56-account-create-update-xfj2b" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.379860 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e92d7a9b-a014-4599-b179-1aaecf94ef42-operator-scripts\") pod \"nova-api-9e56-account-create-update-xfj2b\" (UID: \"e92d7a9b-a014-4599-b179-1aaecf94ef42\") " pod="nova-kuttl-default/nova-api-9e56-account-create-update-xfj2b" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.381355 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e92d7a9b-a014-4599-b179-1aaecf94ef42-operator-scripts\") pod \"nova-api-9e56-account-create-update-xfj2b\" (UID: \"e92d7a9b-a014-4599-b179-1aaecf94ef42\") " pod="nova-kuttl-default/nova-api-9e56-account-create-update-xfj2b" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.381785 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9662d64-6770-40c9-82c4-787eced67f4d-operator-scripts\") pod \"nova-cell1-db-create-hxfhf\" (UID: \"c9662d64-6770-40c9-82c4-787eced67f4d\") " pod="nova-kuttl-default/nova-cell1-db-create-hxfhf" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.423797 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxq22\" (UniqueName: 
\"kubernetes.io/projected/c9662d64-6770-40c9-82c4-787eced67f4d-kube-api-access-gxq22\") pod \"nova-cell1-db-create-hxfhf\" (UID: \"c9662d64-6770-40c9-82c4-787eced67f4d\") " pod="nova-kuttl-default/nova-cell1-db-create-hxfhf" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.426233 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lr47b\" (UniqueName: \"kubernetes.io/projected/e92d7a9b-a014-4599-b179-1aaecf94ef42-kube-api-access-lr47b\") pod \"nova-api-9e56-account-create-update-xfj2b\" (UID: \"e92d7a9b-a014-4599-b179-1aaecf94ef42\") " pod="nova-kuttl-default/nova-api-9e56-account-create-update-xfj2b" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.455997 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-api-9e56-account-create-update-xfj2b" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.483233 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75efce06-d8d9-435f-850b-da48f7191d74-operator-scripts\") pod \"nova-cell0-dab5-account-create-update-pvx6d\" (UID: \"75efce06-d8d9-435f-850b-da48f7191d74\") " pod="nova-kuttl-default/nova-cell0-dab5-account-create-update-pvx6d" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.483319 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2j26\" (UniqueName: \"kubernetes.io/projected/75efce06-d8d9-435f-850b-da48f7191d74-kube-api-access-v2j26\") pod \"nova-cell0-dab5-account-create-update-pvx6d\" (UID: \"75efce06-d8d9-435f-850b-da48f7191d74\") " pod="nova-kuttl-default/nova-cell0-dab5-account-create-update-pvx6d" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.546906 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-cell1-7ea3-account-create-update-zngqd"] Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.548056 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-7ea3-account-create-update-zngqd" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.554675 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-cell1-db-create-hxfhf" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.555788 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell1-7ea3-account-create-update-zngqd"] Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.559186 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-cell1-db-secret" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.584617 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2j26\" (UniqueName: \"kubernetes.io/projected/75efce06-d8d9-435f-850b-da48f7191d74-kube-api-access-v2j26\") pod \"nova-cell0-dab5-account-create-update-pvx6d\" (UID: \"75efce06-d8d9-435f-850b-da48f7191d74\") " pod="nova-kuttl-default/nova-cell0-dab5-account-create-update-pvx6d" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.584884 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75efce06-d8d9-435f-850b-da48f7191d74-operator-scripts\") pod \"nova-cell0-dab5-account-create-update-pvx6d\" (UID: \"75efce06-d8d9-435f-850b-da48f7191d74\") " pod="nova-kuttl-default/nova-cell0-dab5-account-create-update-pvx6d" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.585668 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75efce06-d8d9-435f-850b-da48f7191d74-operator-scripts\") pod \"nova-cell0-dab5-account-create-update-pvx6d\" (UID: \"75efce06-d8d9-435f-850b-da48f7191d74\") " pod="nova-kuttl-default/nova-cell0-dab5-account-create-update-pvx6d" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.602949 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2j26\" (UniqueName: \"kubernetes.io/projected/75efce06-d8d9-435f-850b-da48f7191d74-kube-api-access-v2j26\") pod \"nova-cell0-dab5-account-create-update-pvx6d\" (UID: \"75efce06-d8d9-435f-850b-da48f7191d74\") " pod="nova-kuttl-default/nova-cell0-dab5-account-create-update-pvx6d" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.763053 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-cell0-dab5-account-create-update-pvx6d" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.813035 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-api-db-create-v84r6"] Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.861929 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0d1e6c02-734b-4427-8817-2a06ff94e5a0-operator-scripts\") pod \"nova-cell1-7ea3-account-create-update-zngqd\" (UID: \"0d1e6c02-734b-4427-8817-2a06ff94e5a0\") " pod="nova-kuttl-default/nova-cell1-7ea3-account-create-update-zngqd" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.862372 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mt5wq\" (UniqueName: \"kubernetes.io/projected/0d1e6c02-734b-4427-8817-2a06ff94e5a0-kube-api-access-mt5wq\") pod \"nova-cell1-7ea3-account-create-update-zngqd\" (UID: \"0d1e6c02-734b-4427-8817-2a06ff94e5a0\") " pod="nova-kuttl-default/nova-cell1-7ea3-account-create-update-zngqd" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.874353 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-db-create-v84r6" event={"ID":"f4382f53-4838-48fd-b802-b0912baaac04","Type":"ContainerStarted","Data":"1ea08ccb2e20ff93667026e67f418625371ab453b70bb513cc284d0162c9dcbe"} Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.964240 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0d1e6c02-734b-4427-8817-2a06ff94e5a0-operator-scripts\") pod \"nova-cell1-7ea3-account-create-update-zngqd\" (UID: \"0d1e6c02-734b-4427-8817-2a06ff94e5a0\") " pod="nova-kuttl-default/nova-cell1-7ea3-account-create-update-zngqd" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.967792 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0d1e6c02-734b-4427-8817-2a06ff94e5a0-operator-scripts\") pod \"nova-cell1-7ea3-account-create-update-zngqd\" (UID: \"0d1e6c02-734b-4427-8817-2a06ff94e5a0\") " pod="nova-kuttl-default/nova-cell1-7ea3-account-create-update-zngqd" Jan 23 08:43:34 crc kubenswrapper[4711]: I0123 08:43:34.985113 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mt5wq\" (UniqueName: \"kubernetes.io/projected/0d1e6c02-734b-4427-8817-2a06ff94e5a0-kube-api-access-mt5wq\") pod \"nova-cell1-7ea3-account-create-update-zngqd\" (UID: \"0d1e6c02-734b-4427-8817-2a06ff94e5a0\") " pod="nova-kuttl-default/nova-cell1-7ea3-account-create-update-zngqd" Jan 23 08:43:35 crc kubenswrapper[4711]: I0123 08:43:35.016680 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mt5wq\" (UniqueName: \"kubernetes.io/projected/0d1e6c02-734b-4427-8817-2a06ff94e5a0-kube-api-access-mt5wq\") pod \"nova-cell1-7ea3-account-create-update-zngqd\" (UID: \"0d1e6c02-734b-4427-8817-2a06ff94e5a0\") " pod="nova-kuttl-default/nova-cell1-7ea3-account-create-update-zngqd" Jan 23 08:43:35 crc kubenswrapper[4711]: W0123 08:43:35.107092 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod62c3a2da_aa4f_4cef_8a9e_d3cf3c3e9ca8.slice/crio-f30e67176b71b99d90a34a4e3fb404ccb0ea1575d906d5517060e17aaa269adf WatchSource:0}: 
Error finding container f30e67176b71b99d90a34a4e3fb404ccb0ea1575d906d5517060e17aaa269adf: Status 404 returned error can't find the container with id f30e67176b71b99d90a34a4e3fb404ccb0ea1575d906d5517060e17aaa269adf Jan 23 08:43:35 crc kubenswrapper[4711]: I0123 08:43:35.108195 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell0-db-create-spdnp"] Jan 23 08:43:35 crc kubenswrapper[4711]: I0123 08:43:35.173576 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-7ea3-account-create-update-zngqd" Jan 23 08:43:35 crc kubenswrapper[4711]: I0123 08:43:35.208582 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-api-9e56-account-create-update-xfj2b"] Jan 23 08:43:35 crc kubenswrapper[4711]: W0123 08:43:35.215002 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode92d7a9b_a014_4599_b179_1aaecf94ef42.slice/crio-091ec59453938045e4bf3bb93dc85a9fb7cbb03276a378b2036537a9d830180d WatchSource:0}: Error finding container 091ec59453938045e4bf3bb93dc85a9fb7cbb03276a378b2036537a9d830180d: Status 404 returned error can't find the container with id 091ec59453938045e4bf3bb93dc85a9fb7cbb03276a378b2036537a9d830180d Jan 23 08:43:35 crc kubenswrapper[4711]: W0123 08:43:35.342836 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75efce06_d8d9_435f_850b_da48f7191d74.slice/crio-f9aafa3d54d1439a1c3ea64369879a13443a08155d4699f07856640f3f65c99a WatchSource:0}: Error finding container f9aafa3d54d1439a1c3ea64369879a13443a08155d4699f07856640f3f65c99a: Status 404 returned error can't find the container with id f9aafa3d54d1439a1c3ea64369879a13443a08155d4699f07856640f3f65c99a Jan 23 08:43:35 crc kubenswrapper[4711]: I0123 08:43:35.351659 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell0-dab5-account-create-update-pvx6d"] Jan 23 08:43:35 crc kubenswrapper[4711]: W0123 08:43:35.353808 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc9662d64_6770_40c9_82c4_787eced67f4d.slice/crio-e6795a9e19dec0400b621ca7b082660c887cbf656dc601584c3681d29b3b33e0 WatchSource:0}: Error finding container e6795a9e19dec0400b621ca7b082660c887cbf656dc601584c3681d29b3b33e0: Status 404 returned error can't find the container with id e6795a9e19dec0400b621ca7b082660c887cbf656dc601584c3681d29b3b33e0 Jan 23 08:43:35 crc kubenswrapper[4711]: I0123 08:43:35.371080 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell1-db-create-hxfhf"] Jan 23 08:43:35 crc kubenswrapper[4711]: I0123 08:43:35.671070 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell1-7ea3-account-create-update-zngqd"] Jan 23 08:43:35 crc kubenswrapper[4711]: I0123 08:43:35.881831 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-db-create-v84r6" event={"ID":"f4382f53-4838-48fd-b802-b0912baaac04","Type":"ContainerStarted","Data":"12aad7d7e3601df2d1031155d252b192ee7b6a5c8a6fad4c4094ade950e47290"} Jan 23 08:43:35 crc kubenswrapper[4711]: I0123 08:43:35.882864 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-7ea3-account-create-update-zngqd" 
event={"ID":"0d1e6c02-734b-4427-8817-2a06ff94e5a0","Type":"ContainerStarted","Data":"04776ccd6b4af39cb8d9ff0d0905554eea6b581d644477ca1af1e61a92b6a4aa"} Jan 23 08:43:35 crc kubenswrapper[4711]: I0123 08:43:35.885266 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-db-create-spdnp" event={"ID":"62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8","Type":"ContainerStarted","Data":"6c07aa742c175924fadf2c34a07aa6a32bb93df07ea8b6b5a2d4bc8266bfaea7"} Jan 23 08:43:35 crc kubenswrapper[4711]: I0123 08:43:35.885914 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-db-create-spdnp" event={"ID":"62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8","Type":"ContainerStarted","Data":"f30e67176b71b99d90a34a4e3fb404ccb0ea1575d906d5517060e17aaa269adf"} Jan 23 08:43:35 crc kubenswrapper[4711]: I0123 08:43:35.887529 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-dab5-account-create-update-pvx6d" event={"ID":"75efce06-d8d9-435f-850b-da48f7191d74","Type":"ContainerStarted","Data":"8ab9761500237919aa5f88c2639738861b193c4e8389e0149265fecc70bd84a0"} Jan 23 08:43:35 crc kubenswrapper[4711]: I0123 08:43:35.887557 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-dab5-account-create-update-pvx6d" event={"ID":"75efce06-d8d9-435f-850b-da48f7191d74","Type":"ContainerStarted","Data":"f9aafa3d54d1439a1c3ea64369879a13443a08155d4699f07856640f3f65c99a"} Jan 23 08:43:35 crc kubenswrapper[4711]: I0123 08:43:35.889136 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-db-create-hxfhf" event={"ID":"c9662d64-6770-40c9-82c4-787eced67f4d","Type":"ContainerStarted","Data":"c568d53916b5f2a2e500cb3f258a5f005a235f7d03b21e197e50e65b6324242f"} Jan 23 08:43:35 crc kubenswrapper[4711]: I0123 08:43:35.889161 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-db-create-hxfhf" event={"ID":"c9662d64-6770-40c9-82c4-787eced67f4d","Type":"ContainerStarted","Data":"e6795a9e19dec0400b621ca7b082660c887cbf656dc601584c3681d29b3b33e0"} Jan 23 08:43:35 crc kubenswrapper[4711]: I0123 08:43:35.891426 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-9e56-account-create-update-xfj2b" event={"ID":"e92d7a9b-a014-4599-b179-1aaecf94ef42","Type":"ContainerStarted","Data":"281a5f793f8779075584aea6071d8d5b8b5eaca314ee5932b5a49611e614de7f"} Jan 23 08:43:35 crc kubenswrapper[4711]: I0123 08:43:35.891473 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-9e56-account-create-update-xfj2b" event={"ID":"e92d7a9b-a014-4599-b179-1aaecf94ef42","Type":"ContainerStarted","Data":"091ec59453938045e4bf3bb93dc85a9fb7cbb03276a378b2036537a9d830180d"} Jan 23 08:43:35 crc kubenswrapper[4711]: I0123 08:43:35.896837 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-api-db-create-v84r6" podStartSLOduration=2.8968227779999998 podStartE2EDuration="2.896822778s" podCreationTimestamp="2026-01-23 08:43:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:43:35.893977638 +0000 UTC m=+1401.466934006" watchObservedRunningTime="2026-01-23 08:43:35.896822778 +0000 UTC m=+1401.469779146" Jan 23 08:43:35 crc kubenswrapper[4711]: I0123 08:43:35.914961 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="nova-kuttl-default/nova-cell0-db-create-spdnp" podStartSLOduration=1.9149440279999999 podStartE2EDuration="1.914944028s" podCreationTimestamp="2026-01-23 08:43:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:43:35.908695047 +0000 UTC m=+1401.481651425" watchObservedRunningTime="2026-01-23 08:43:35.914944028 +0000 UTC m=+1401.487900386" Jan 23 08:43:35 crc kubenswrapper[4711]: I0123 08:43:35.928848 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-api-9e56-account-create-update-xfj2b" podStartSLOduration=1.928830227 podStartE2EDuration="1.928830227s" podCreationTimestamp="2026-01-23 08:43:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:43:35.920847813 +0000 UTC m=+1401.493804181" watchObservedRunningTime="2026-01-23 08:43:35.928830227 +0000 UTC m=+1401.501786595" Jan 23 08:43:36 crc kubenswrapper[4711]: I0123 08:43:36.899970 4711 generic.go:334] "Generic (PLEG): container finished" podID="c9662d64-6770-40c9-82c4-787eced67f4d" containerID="c568d53916b5f2a2e500cb3f258a5f005a235f7d03b21e197e50e65b6324242f" exitCode=0 Jan 23 08:43:36 crc kubenswrapper[4711]: I0123 08:43:36.900039 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-db-create-hxfhf" event={"ID":"c9662d64-6770-40c9-82c4-787eced67f4d","Type":"ContainerDied","Data":"c568d53916b5f2a2e500cb3f258a5f005a235f7d03b21e197e50e65b6324242f"} Jan 23 08:43:36 crc kubenswrapper[4711]: I0123 08:43:36.902488 4711 generic.go:334] "Generic (PLEG): container finished" podID="f4382f53-4838-48fd-b802-b0912baaac04" containerID="12aad7d7e3601df2d1031155d252b192ee7b6a5c8a6fad4c4094ade950e47290" exitCode=0 Jan 23 08:43:36 crc kubenswrapper[4711]: I0123 08:43:36.902591 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-db-create-v84r6" event={"ID":"f4382f53-4838-48fd-b802-b0912baaac04","Type":"ContainerDied","Data":"12aad7d7e3601df2d1031155d252b192ee7b6a5c8a6fad4c4094ade950e47290"} Jan 23 08:43:36 crc kubenswrapper[4711]: I0123 08:43:36.904351 4711 generic.go:334] "Generic (PLEG): container finished" podID="0d1e6c02-734b-4427-8817-2a06ff94e5a0" containerID="6c74a71566fb01e4966e4dffe98c410b6d6c8cdcc562b1d9176018d078548949" exitCode=0 Jan 23 08:43:36 crc kubenswrapper[4711]: I0123 08:43:36.904390 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-7ea3-account-create-update-zngqd" event={"ID":"0d1e6c02-734b-4427-8817-2a06ff94e5a0","Type":"ContainerDied","Data":"6c74a71566fb01e4966e4dffe98c410b6d6c8cdcc562b1d9176018d078548949"} Jan 23 08:43:36 crc kubenswrapper[4711]: I0123 08:43:36.905762 4711 generic.go:334] "Generic (PLEG): container finished" podID="62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8" containerID="6c07aa742c175924fadf2c34a07aa6a32bb93df07ea8b6b5a2d4bc8266bfaea7" exitCode=0 Jan 23 08:43:36 crc kubenswrapper[4711]: I0123 08:43:36.905793 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-db-create-spdnp" event={"ID":"62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8","Type":"ContainerDied","Data":"6c07aa742c175924fadf2c34a07aa6a32bb93df07ea8b6b5a2d4bc8266bfaea7"} Jan 23 08:43:36 crc kubenswrapper[4711]: I0123 08:43:36.932808 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="nova-kuttl-default/nova-cell0-dab5-account-create-update-pvx6d" podStartSLOduration=2.932786596 podStartE2EDuration="2.932786596s" podCreationTimestamp="2026-01-23 08:43:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:43:36.928709216 +0000 UTC m=+1402.501665584" watchObservedRunningTime="2026-01-23 08:43:36.932786596 +0000 UTC m=+1402.505742974" Jan 23 08:43:37 crc kubenswrapper[4711]: I0123 08:43:37.918011 4711 generic.go:334] "Generic (PLEG): container finished" podID="75efce06-d8d9-435f-850b-da48f7191d74" containerID="8ab9761500237919aa5f88c2639738861b193c4e8389e0149265fecc70bd84a0" exitCode=0 Jan 23 08:43:37 crc kubenswrapper[4711]: I0123 08:43:37.918077 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-dab5-account-create-update-pvx6d" event={"ID":"75efce06-d8d9-435f-850b-da48f7191d74","Type":"ContainerDied","Data":"8ab9761500237919aa5f88c2639738861b193c4e8389e0149265fecc70bd84a0"} Jan 23 08:43:37 crc kubenswrapper[4711]: I0123 08:43:37.920070 4711 generic.go:334] "Generic (PLEG): container finished" podID="e92d7a9b-a014-4599-b179-1aaecf94ef42" containerID="281a5f793f8779075584aea6071d8d5b8b5eaca314ee5932b5a49611e614de7f" exitCode=0 Jan 23 08:43:37 crc kubenswrapper[4711]: I0123 08:43:37.920099 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-9e56-account-create-update-xfj2b" event={"ID":"e92d7a9b-a014-4599-b179-1aaecf94ef42","Type":"ContainerDied","Data":"281a5f793f8779075584aea6071d8d5b8b5eaca314ee5932b5a49611e614de7f"} Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.444834 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-7ea3-account-create-update-zngqd" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.631633 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-db-create-spdnp" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.637797 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-api-db-create-v84r6" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.648562 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-db-create-hxfhf" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.652204 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mt5wq\" (UniqueName: \"kubernetes.io/projected/0d1e6c02-734b-4427-8817-2a06ff94e5a0-kube-api-access-mt5wq\") pod \"0d1e6c02-734b-4427-8817-2a06ff94e5a0\" (UID: \"0d1e6c02-734b-4427-8817-2a06ff94e5a0\") " Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.652379 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0d1e6c02-734b-4427-8817-2a06ff94e5a0-operator-scripts\") pod \"0d1e6c02-734b-4427-8817-2a06ff94e5a0\" (UID: \"0d1e6c02-734b-4427-8817-2a06ff94e5a0\") " Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.653482 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d1e6c02-734b-4427-8817-2a06ff94e5a0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0d1e6c02-734b-4427-8817-2a06ff94e5a0" (UID: "0d1e6c02-734b-4427-8817-2a06ff94e5a0"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.658610 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d1e6c02-734b-4427-8817-2a06ff94e5a0-kube-api-access-mt5wq" (OuterVolumeSpecName: "kube-api-access-mt5wq") pod "0d1e6c02-734b-4427-8817-2a06ff94e5a0" (UID: "0d1e6c02-734b-4427-8817-2a06ff94e5a0"). InnerVolumeSpecName "kube-api-access-mt5wq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.754448 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4382f53-4838-48fd-b802-b0912baaac04-operator-scripts\") pod \"f4382f53-4838-48fd-b802-b0912baaac04\" (UID: \"f4382f53-4838-48fd-b802-b0912baaac04\") " Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.754636 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qgcs4\" (UniqueName: \"kubernetes.io/projected/62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8-kube-api-access-qgcs4\") pod \"62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8\" (UID: \"62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8\") " Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.754665 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9fdpb\" (UniqueName: \"kubernetes.io/projected/f4382f53-4838-48fd-b802-b0912baaac04-kube-api-access-9fdpb\") pod \"f4382f53-4838-48fd-b802-b0912baaac04\" (UID: \"f4382f53-4838-48fd-b802-b0912baaac04\") " Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.754711 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8-operator-scripts\") pod \"62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8\" (UID: \"62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8\") " Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.754753 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9662d64-6770-40c9-82c4-787eced67f4d-operator-scripts\") pod \"c9662d64-6770-40c9-82c4-787eced67f4d\" (UID: \"c9662d64-6770-40c9-82c4-787eced67f4d\") " Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.754805 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxq22\" (UniqueName: \"kubernetes.io/projected/c9662d64-6770-40c9-82c4-787eced67f4d-kube-api-access-gxq22\") pod \"c9662d64-6770-40c9-82c4-787eced67f4d\" (UID: \"c9662d64-6770-40c9-82c4-787eced67f4d\") " Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.754945 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4382f53-4838-48fd-b802-b0912baaac04-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f4382f53-4838-48fd-b802-b0912baaac04" (UID: "f4382f53-4838-48fd-b802-b0912baaac04"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.755169 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4382f53-4838-48fd-b802-b0912baaac04-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.755188 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mt5wq\" (UniqueName: \"kubernetes.io/projected/0d1e6c02-734b-4427-8817-2a06ff94e5a0-kube-api-access-mt5wq\") on node \"crc\" DevicePath \"\"" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.755201 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0d1e6c02-734b-4427-8817-2a06ff94e5a0-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.755376 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8" (UID: "62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.755778 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c9662d64-6770-40c9-82c4-787eced67f4d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c9662d64-6770-40c9-82c4-787eced67f4d" (UID: "c9662d64-6770-40c9-82c4-787eced67f4d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.757458 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8-kube-api-access-qgcs4" (OuterVolumeSpecName: "kube-api-access-qgcs4") pod "62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8" (UID: "62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8"). InnerVolumeSpecName "kube-api-access-qgcs4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.758082 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4382f53-4838-48fd-b802-b0912baaac04-kube-api-access-9fdpb" (OuterVolumeSpecName: "kube-api-access-9fdpb") pod "f4382f53-4838-48fd-b802-b0912baaac04" (UID: "f4382f53-4838-48fd-b802-b0912baaac04"). InnerVolumeSpecName "kube-api-access-9fdpb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.758738 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9662d64-6770-40c9-82c4-787eced67f4d-kube-api-access-gxq22" (OuterVolumeSpecName: "kube-api-access-gxq22") pod "c9662d64-6770-40c9-82c4-787eced67f4d" (UID: "c9662d64-6770-40c9-82c4-787eced67f4d"). InnerVolumeSpecName "kube-api-access-gxq22". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.857090 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qgcs4\" (UniqueName: \"kubernetes.io/projected/62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8-kube-api-access-qgcs4\") on node \"crc\" DevicePath \"\"" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.857113 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9fdpb\" (UniqueName: \"kubernetes.io/projected/f4382f53-4838-48fd-b802-b0912baaac04-kube-api-access-9fdpb\") on node \"crc\" DevicePath \"\"" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.857125 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.857134 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9662d64-6770-40c9-82c4-787eced67f4d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.857143 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxq22\" (UniqueName: \"kubernetes.io/projected/c9662d64-6770-40c9-82c4-787eced67f4d-kube-api-access-gxq22\") on node \"crc\" DevicePath \"\"" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.927628 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-7ea3-account-create-update-zngqd" event={"ID":"0d1e6c02-734b-4427-8817-2a06ff94e5a0","Type":"ContainerDied","Data":"04776ccd6b4af39cb8d9ff0d0905554eea6b581d644477ca1af1e61a92b6a4aa"} Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.927663 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="04776ccd6b4af39cb8d9ff0d0905554eea6b581d644477ca1af1e61a92b6a4aa" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.927717 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-7ea3-account-create-update-zngqd" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.933488 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-db-create-spdnp" event={"ID":"62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8","Type":"ContainerDied","Data":"f30e67176b71b99d90a34a4e3fb404ccb0ea1575d906d5517060e17aaa269adf"} Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.933544 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f30e67176b71b99d90a34a4e3fb404ccb0ea1575d906d5517060e17aaa269adf" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.933603 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-db-create-spdnp" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.940852 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-cell1-db-create-hxfhf" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.941382 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-db-create-hxfhf" event={"ID":"c9662d64-6770-40c9-82c4-787eced67f4d","Type":"ContainerDied","Data":"e6795a9e19dec0400b621ca7b082660c887cbf656dc601584c3681d29b3b33e0"} Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.941412 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e6795a9e19dec0400b621ca7b082660c887cbf656dc601584c3681d29b3b33e0" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.943934 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-db-create-v84r6" event={"ID":"f4382f53-4838-48fd-b802-b0912baaac04","Type":"ContainerDied","Data":"1ea08ccb2e20ff93667026e67f418625371ab453b70bb513cc284d0162c9dcbe"} Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.943995 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ea08ccb2e20ff93667026e67f418625371ab453b70bb513cc284d0162c9dcbe" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:38.943951 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-api-db-create-v84r6" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:40.905439 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-api-9e56-account-create-update-xfj2b" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:40.910468 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-dab5-account-create-update-pvx6d" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:40.960697 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-dab5-account-create-update-pvx6d" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:40.960717 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-dab5-account-create-update-pvx6d" event={"ID":"75efce06-d8d9-435f-850b-da48f7191d74","Type":"ContainerDied","Data":"f9aafa3d54d1439a1c3ea64369879a13443a08155d4699f07856640f3f65c99a"} Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:40.960774 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f9aafa3d54d1439a1c3ea64369879a13443a08155d4699f07856640f3f65c99a" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:40.962444 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-9e56-account-create-update-xfj2b" event={"ID":"e92d7a9b-a014-4599-b179-1aaecf94ef42","Type":"ContainerDied","Data":"091ec59453938045e4bf3bb93dc85a9fb7cbb03276a378b2036537a9d830180d"} Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:40.962475 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="091ec59453938045e4bf3bb93dc85a9fb7cbb03276a378b2036537a9d830180d" Jan 23 08:43:40 crc kubenswrapper[4711]: I0123 08:43:40.962545 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-api-9e56-account-create-update-xfj2b" Jan 23 08:43:41 crc kubenswrapper[4711]: I0123 08:43:41.090639 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lr47b\" (UniqueName: \"kubernetes.io/projected/e92d7a9b-a014-4599-b179-1aaecf94ef42-kube-api-access-lr47b\") pod \"e92d7a9b-a014-4599-b179-1aaecf94ef42\" (UID: \"e92d7a9b-a014-4599-b179-1aaecf94ef42\") " Jan 23 08:43:41 crc kubenswrapper[4711]: I0123 08:43:41.090689 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v2j26\" (UniqueName: \"kubernetes.io/projected/75efce06-d8d9-435f-850b-da48f7191d74-kube-api-access-v2j26\") pod \"75efce06-d8d9-435f-850b-da48f7191d74\" (UID: \"75efce06-d8d9-435f-850b-da48f7191d74\") " Jan 23 08:43:41 crc kubenswrapper[4711]: I0123 08:43:41.090803 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e92d7a9b-a014-4599-b179-1aaecf94ef42-operator-scripts\") pod \"e92d7a9b-a014-4599-b179-1aaecf94ef42\" (UID: \"e92d7a9b-a014-4599-b179-1aaecf94ef42\") " Jan 23 08:43:41 crc kubenswrapper[4711]: I0123 08:43:41.090888 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75efce06-d8d9-435f-850b-da48f7191d74-operator-scripts\") pod \"75efce06-d8d9-435f-850b-da48f7191d74\" (UID: \"75efce06-d8d9-435f-850b-da48f7191d74\") " Jan 23 08:43:41 crc kubenswrapper[4711]: I0123 08:43:41.091529 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e92d7a9b-a014-4599-b179-1aaecf94ef42-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e92d7a9b-a014-4599-b179-1aaecf94ef42" (UID: "e92d7a9b-a014-4599-b179-1aaecf94ef42"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:43:41 crc kubenswrapper[4711]: I0123 08:43:41.091553 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75efce06-d8d9-435f-850b-da48f7191d74-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "75efce06-d8d9-435f-850b-da48f7191d74" (UID: "75efce06-d8d9-435f-850b-da48f7191d74"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:43:41 crc kubenswrapper[4711]: I0123 08:43:41.105987 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75efce06-d8d9-435f-850b-da48f7191d74-kube-api-access-v2j26" (OuterVolumeSpecName: "kube-api-access-v2j26") pod "75efce06-d8d9-435f-850b-da48f7191d74" (UID: "75efce06-d8d9-435f-850b-da48f7191d74"). InnerVolumeSpecName "kube-api-access-v2j26". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:43:41 crc kubenswrapper[4711]: I0123 08:43:41.106093 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e92d7a9b-a014-4599-b179-1aaecf94ef42-kube-api-access-lr47b" (OuterVolumeSpecName: "kube-api-access-lr47b") pod "e92d7a9b-a014-4599-b179-1aaecf94ef42" (UID: "e92d7a9b-a014-4599-b179-1aaecf94ef42"). InnerVolumeSpecName "kube-api-access-lr47b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:43:41 crc kubenswrapper[4711]: I0123 08:43:41.192589 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75efce06-d8d9-435f-850b-da48f7191d74-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:43:41 crc kubenswrapper[4711]: I0123 08:43:41.192646 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lr47b\" (UniqueName: \"kubernetes.io/projected/e92d7a9b-a014-4599-b179-1aaecf94ef42-kube-api-access-lr47b\") on node \"crc\" DevicePath \"\"" Jan 23 08:43:41 crc kubenswrapper[4711]: I0123 08:43:41.193153 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v2j26\" (UniqueName: \"kubernetes.io/projected/75efce06-d8d9-435f-850b-da48f7191d74-kube-api-access-v2j26\") on node \"crc\" DevicePath \"\"" Jan 23 08:43:41 crc kubenswrapper[4711]: I0123 08:43:41.193250 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e92d7a9b-a014-4599-b179-1aaecf94ef42-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:43:43 crc kubenswrapper[4711]: I0123 08:43:43.984787 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hmqvm"] Jan 23 08:43:43 crc kubenswrapper[4711]: E0123 08:43:43.985163 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e92d7a9b-a014-4599-b179-1aaecf94ef42" containerName="mariadb-account-create-update" Jan 23 08:43:43 crc kubenswrapper[4711]: I0123 08:43:43.985179 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e92d7a9b-a014-4599-b179-1aaecf94ef42" containerName="mariadb-account-create-update" Jan 23 08:43:43 crc kubenswrapper[4711]: E0123 08:43:43.985226 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4382f53-4838-48fd-b802-b0912baaac04" containerName="mariadb-database-create" Jan 23 08:43:43 crc kubenswrapper[4711]: I0123 08:43:43.985236 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4382f53-4838-48fd-b802-b0912baaac04" containerName="mariadb-database-create" Jan 23 08:43:43 crc kubenswrapper[4711]: E0123 08:43:43.985258 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9662d64-6770-40c9-82c4-787eced67f4d" containerName="mariadb-database-create" Jan 23 08:43:43 crc kubenswrapper[4711]: I0123 08:43:43.985266 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9662d64-6770-40c9-82c4-787eced67f4d" containerName="mariadb-database-create" Jan 23 08:43:43 crc kubenswrapper[4711]: E0123 08:43:43.985288 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8" containerName="mariadb-database-create" Jan 23 08:43:43 crc kubenswrapper[4711]: I0123 08:43:43.985296 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8" containerName="mariadb-database-create" Jan 23 08:43:43 crc kubenswrapper[4711]: E0123 08:43:43.985315 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d1e6c02-734b-4427-8817-2a06ff94e5a0" containerName="mariadb-account-create-update" Jan 23 08:43:43 crc kubenswrapper[4711]: I0123 08:43:43.985323 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d1e6c02-734b-4427-8817-2a06ff94e5a0" containerName="mariadb-account-create-update" Jan 23 08:43:43 crc kubenswrapper[4711]: E0123 08:43:43.985342 4711 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="75efce06-d8d9-435f-850b-da48f7191d74" containerName="mariadb-account-create-update" Jan 23 08:43:43 crc kubenswrapper[4711]: I0123 08:43:43.985350 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="75efce06-d8d9-435f-850b-da48f7191d74" containerName="mariadb-account-create-update" Jan 23 08:43:43 crc kubenswrapper[4711]: I0123 08:43:43.985573 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8" containerName="mariadb-database-create" Jan 23 08:43:43 crc kubenswrapper[4711]: I0123 08:43:43.985588 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9662d64-6770-40c9-82c4-787eced67f4d" containerName="mariadb-database-create" Jan 23 08:43:43 crc kubenswrapper[4711]: I0123 08:43:43.985600 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4382f53-4838-48fd-b802-b0912baaac04" containerName="mariadb-database-create" Jan 23 08:43:43 crc kubenswrapper[4711]: I0123 08:43:43.985612 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e92d7a9b-a014-4599-b179-1aaecf94ef42" containerName="mariadb-account-create-update" Jan 23 08:43:43 crc kubenswrapper[4711]: I0123 08:43:43.985623 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="75efce06-d8d9-435f-850b-da48f7191d74" containerName="mariadb-account-create-update" Jan 23 08:43:43 crc kubenswrapper[4711]: I0123 08:43:43.985640 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d1e6c02-734b-4427-8817-2a06ff94e5a0" containerName="mariadb-account-create-update" Jan 23 08:43:43 crc kubenswrapper[4711]: I0123 08:43:43.987088 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hmqvm" Jan 23 08:43:43 crc kubenswrapper[4711]: I0123 08:43:43.997040 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hmqvm"] Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.140199 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e4096eb-c9d6-48b8-b649-e59f16dc4167-utilities\") pod \"redhat-operators-hmqvm\" (UID: \"6e4096eb-c9d6-48b8-b649-e59f16dc4167\") " pod="openshift-marketplace/redhat-operators-hmqvm" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.140258 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e4096eb-c9d6-48b8-b649-e59f16dc4167-catalog-content\") pod \"redhat-operators-hmqvm\" (UID: \"6e4096eb-c9d6-48b8-b649-e59f16dc4167\") " pod="openshift-marketplace/redhat-operators-hmqvm" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.140365 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-429l5\" (UniqueName: \"kubernetes.io/projected/6e4096eb-c9d6-48b8-b649-e59f16dc4167-kube-api-access-429l5\") pod \"redhat-operators-hmqvm\" (UID: \"6e4096eb-c9d6-48b8-b649-e59f16dc4167\") " pod="openshift-marketplace/redhat-operators-hmqvm" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.241411 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-429l5\" (UniqueName: \"kubernetes.io/projected/6e4096eb-c9d6-48b8-b649-e59f16dc4167-kube-api-access-429l5\") pod \"redhat-operators-hmqvm\" (UID: \"6e4096eb-c9d6-48b8-b649-e59f16dc4167\") " 
pod="openshift-marketplace/redhat-operators-hmqvm" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.241486 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e4096eb-c9d6-48b8-b649-e59f16dc4167-utilities\") pod \"redhat-operators-hmqvm\" (UID: \"6e4096eb-c9d6-48b8-b649-e59f16dc4167\") " pod="openshift-marketplace/redhat-operators-hmqvm" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.241592 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e4096eb-c9d6-48b8-b649-e59f16dc4167-catalog-content\") pod \"redhat-operators-hmqvm\" (UID: \"6e4096eb-c9d6-48b8-b649-e59f16dc4167\") " pod="openshift-marketplace/redhat-operators-hmqvm" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.242175 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e4096eb-c9d6-48b8-b649-e59f16dc4167-catalog-content\") pod \"redhat-operators-hmqvm\" (UID: \"6e4096eb-c9d6-48b8-b649-e59f16dc4167\") " pod="openshift-marketplace/redhat-operators-hmqvm" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.242180 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e4096eb-c9d6-48b8-b649-e59f16dc4167-utilities\") pod \"redhat-operators-hmqvm\" (UID: \"6e4096eb-c9d6-48b8-b649-e59f16dc4167\") " pod="openshift-marketplace/redhat-operators-hmqvm" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.265882 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-429l5\" (UniqueName: \"kubernetes.io/projected/6e4096eb-c9d6-48b8-b649-e59f16dc4167-kube-api-access-429l5\") pod \"redhat-operators-hmqvm\" (UID: \"6e4096eb-c9d6-48b8-b649-e59f16dc4167\") " pod="openshift-marketplace/redhat-operators-hmqvm" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.304118 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hmqvm" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.592320 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m"] Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.598413 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.600224 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-conductor-config-data" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.600260 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-conductor-scripts" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.600605 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-nova-kuttl-dockercfg-6jltd" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.612670 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m"] Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.648050 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d169e211-d958-49c3-b47c-8aaa334f37b8-config-data\") pod \"nova-kuttl-cell0-conductor-db-sync-cl75m\" (UID: \"d169e211-d958-49c3-b47c-8aaa334f37b8\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.648119 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d169e211-d958-49c3-b47c-8aaa334f37b8-scripts\") pod \"nova-kuttl-cell0-conductor-db-sync-cl75m\" (UID: \"d169e211-d958-49c3-b47c-8aaa334f37b8\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.648146 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfv2r\" (UniqueName: \"kubernetes.io/projected/d169e211-d958-49c3-b47c-8aaa334f37b8-kube-api-access-tfv2r\") pod \"nova-kuttl-cell0-conductor-db-sync-cl75m\" (UID: \"d169e211-d958-49c3-b47c-8aaa334f37b8\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.750248 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d169e211-d958-49c3-b47c-8aaa334f37b8-scripts\") pod \"nova-kuttl-cell0-conductor-db-sync-cl75m\" (UID: \"d169e211-d958-49c3-b47c-8aaa334f37b8\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.750357 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfv2r\" (UniqueName: \"kubernetes.io/projected/d169e211-d958-49c3-b47c-8aaa334f37b8-kube-api-access-tfv2r\") pod \"nova-kuttl-cell0-conductor-db-sync-cl75m\" (UID: \"d169e211-d958-49c3-b47c-8aaa334f37b8\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.750630 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d169e211-d958-49c3-b47c-8aaa334f37b8-config-data\") pod \"nova-kuttl-cell0-conductor-db-sync-cl75m\" (UID: \"d169e211-d958-49c3-b47c-8aaa334f37b8\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.758730 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/d169e211-d958-49c3-b47c-8aaa334f37b8-scripts\") pod \"nova-kuttl-cell0-conductor-db-sync-cl75m\" (UID: \"d169e211-d958-49c3-b47c-8aaa334f37b8\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.759758 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d169e211-d958-49c3-b47c-8aaa334f37b8-config-data\") pod \"nova-kuttl-cell0-conductor-db-sync-cl75m\" (UID: \"d169e211-d958-49c3-b47c-8aaa334f37b8\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.768231 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfv2r\" (UniqueName: \"kubernetes.io/projected/d169e211-d958-49c3-b47c-8aaa334f37b8-kube-api-access-tfv2r\") pod \"nova-kuttl-cell0-conductor-db-sync-cl75m\" (UID: \"d169e211-d958-49c3-b47c-8aaa334f37b8\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.780297 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hmqvm"] Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.921792 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m" Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.998340 4711 generic.go:334] "Generic (PLEG): container finished" podID="6e4096eb-c9d6-48b8-b649-e59f16dc4167" containerID="b769b7be5aae9dffa0cfbd814c529fc00d6626207149d296fe1f6698b0f7c38c" exitCode=0 Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.999432 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hmqvm" event={"ID":"6e4096eb-c9d6-48b8-b649-e59f16dc4167","Type":"ContainerDied","Data":"b769b7be5aae9dffa0cfbd814c529fc00d6626207149d296fe1f6698b0f7c38c"} Jan 23 08:43:44 crc kubenswrapper[4711]: I0123 08:43:44.999563 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hmqvm" event={"ID":"6e4096eb-c9d6-48b8-b649-e59f16dc4167","Type":"ContainerStarted","Data":"4f5dba449a440ea3f49ebee6953495dd7a5c71961d72890cf9039cb1d2a51543"} Jan 23 08:43:45 crc kubenswrapper[4711]: I0123 08:43:45.416018 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m"] Jan 23 08:43:46 crc kubenswrapper[4711]: I0123 08:43:46.024733 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m" event={"ID":"d169e211-d958-49c3-b47c-8aaa334f37b8","Type":"ContainerStarted","Data":"56fb2b53d1524cfde24a287f07438d7134a7066a67d27544437069f85a5781d5"} Jan 23 08:43:47 crc kubenswrapper[4711]: I0123 08:43:47.033481 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hmqvm" event={"ID":"6e4096eb-c9d6-48b8-b649-e59f16dc4167","Type":"ContainerStarted","Data":"15b192077714ca994ffb897cce3550b66d27610c5e5ed17688616bc90745d77c"} Jan 23 08:43:49 crc kubenswrapper[4711]: I0123 08:43:49.054757 4711 generic.go:334] "Generic (PLEG): container finished" podID="6e4096eb-c9d6-48b8-b649-e59f16dc4167" containerID="15b192077714ca994ffb897cce3550b66d27610c5e5ed17688616bc90745d77c" exitCode=0 Jan 23 08:43:49 crc kubenswrapper[4711]: I0123 08:43:49.054937 4711 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/redhat-operators-hmqvm" event={"ID":"6e4096eb-c9d6-48b8-b649-e59f16dc4167","Type":"ContainerDied","Data":"15b192077714ca994ffb897cce3550b66d27610c5e5ed17688616bc90745d77c"} Jan 23 08:43:55 crc kubenswrapper[4711]: I0123 08:43:55.993785 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:43:55 crc kubenswrapper[4711]: I0123 08:43:55.994735 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:44:04 crc kubenswrapper[4711]: I0123 08:44:04.178386 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hmqvm" event={"ID":"6e4096eb-c9d6-48b8-b649-e59f16dc4167","Type":"ContainerStarted","Data":"ebb8ef7bc6790fc62c38008114a2ab64ba530bb49ecc0335e52ee293a008d45c"} Jan 23 08:44:04 crc kubenswrapper[4711]: I0123 08:44:04.181181 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m" event={"ID":"d169e211-d958-49c3-b47c-8aaa334f37b8","Type":"ContainerStarted","Data":"89a7362b668453fd9e6f188d304eb3dbae1158f3c031a79cb0e2cface0ee1e17"} Jan 23 08:44:04 crc kubenswrapper[4711]: I0123 08:44:04.221194 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hmqvm" podStartSLOduration=2.92657872 podStartE2EDuration="21.221168277s" podCreationTimestamp="2026-01-23 08:43:43 +0000 UTC" firstStartedPulling="2026-01-23 08:43:45.000455486 +0000 UTC m=+1410.573411854" lastFinishedPulling="2026-01-23 08:44:03.295045003 +0000 UTC m=+1428.868001411" observedRunningTime="2026-01-23 08:44:04.215067849 +0000 UTC m=+1429.788024217" watchObservedRunningTime="2026-01-23 08:44:04.221168277 +0000 UTC m=+1429.794124645" Jan 23 08:44:04 crc kubenswrapper[4711]: I0123 08:44:04.237737 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m" podStartSLOduration=2.368098348 podStartE2EDuration="20.23771921s" podCreationTimestamp="2026-01-23 08:43:44 +0000 UTC" firstStartedPulling="2026-01-23 08:43:45.425581575 +0000 UTC m=+1410.998537943" lastFinishedPulling="2026-01-23 08:44:03.295202437 +0000 UTC m=+1428.868158805" observedRunningTime="2026-01-23 08:44:04.235862495 +0000 UTC m=+1429.808818883" watchObservedRunningTime="2026-01-23 08:44:04.23771921 +0000 UTC m=+1429.810675578" Jan 23 08:44:04 crc kubenswrapper[4711]: I0123 08:44:04.309884 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hmqvm" Jan 23 08:44:04 crc kubenswrapper[4711]: I0123 08:44:04.310152 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hmqvm" Jan 23 08:44:05 crc kubenswrapper[4711]: I0123 08:44:05.350519 4711 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hmqvm" podUID="6e4096eb-c9d6-48b8-b649-e59f16dc4167" containerName="registry-server" probeResult="failure" output=< Jan 23 
08:44:05 crc kubenswrapper[4711]: timeout: failed to connect service ":50051" within 1s Jan 23 08:44:05 crc kubenswrapper[4711]: > Jan 23 08:44:14 crc kubenswrapper[4711]: I0123 08:44:14.361973 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hmqvm" Jan 23 08:44:14 crc kubenswrapper[4711]: I0123 08:44:14.437683 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hmqvm" Jan 23 08:44:16 crc kubenswrapper[4711]: I0123 08:44:16.184040 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hmqvm"] Jan 23 08:44:16 crc kubenswrapper[4711]: I0123 08:44:16.271072 4711 generic.go:334] "Generic (PLEG): container finished" podID="d169e211-d958-49c3-b47c-8aaa334f37b8" containerID="89a7362b668453fd9e6f188d304eb3dbae1158f3c031a79cb0e2cface0ee1e17" exitCode=0 Jan 23 08:44:16 crc kubenswrapper[4711]: I0123 08:44:16.271106 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m" event={"ID":"d169e211-d958-49c3-b47c-8aaa334f37b8","Type":"ContainerDied","Data":"89a7362b668453fd9e6f188d304eb3dbae1158f3c031a79cb0e2cface0ee1e17"} Jan 23 08:44:16 crc kubenswrapper[4711]: I0123 08:44:16.271286 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hmqvm" podUID="6e4096eb-c9d6-48b8-b649-e59f16dc4167" containerName="registry-server" containerID="cri-o://ebb8ef7bc6790fc62c38008114a2ab64ba530bb49ecc0335e52ee293a008d45c" gracePeriod=2 Jan 23 08:44:16 crc kubenswrapper[4711]: I0123 08:44:16.658931 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hmqvm" Jan 23 08:44:16 crc kubenswrapper[4711]: I0123 08:44:16.786042 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-429l5\" (UniqueName: \"kubernetes.io/projected/6e4096eb-c9d6-48b8-b649-e59f16dc4167-kube-api-access-429l5\") pod \"6e4096eb-c9d6-48b8-b649-e59f16dc4167\" (UID: \"6e4096eb-c9d6-48b8-b649-e59f16dc4167\") " Jan 23 08:44:16 crc kubenswrapper[4711]: I0123 08:44:16.786135 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e4096eb-c9d6-48b8-b649-e59f16dc4167-catalog-content\") pod \"6e4096eb-c9d6-48b8-b649-e59f16dc4167\" (UID: \"6e4096eb-c9d6-48b8-b649-e59f16dc4167\") " Jan 23 08:44:16 crc kubenswrapper[4711]: I0123 08:44:16.786203 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e4096eb-c9d6-48b8-b649-e59f16dc4167-utilities\") pod \"6e4096eb-c9d6-48b8-b649-e59f16dc4167\" (UID: \"6e4096eb-c9d6-48b8-b649-e59f16dc4167\") " Jan 23 08:44:16 crc kubenswrapper[4711]: I0123 08:44:16.787535 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e4096eb-c9d6-48b8-b649-e59f16dc4167-utilities" (OuterVolumeSpecName: "utilities") pod "6e4096eb-c9d6-48b8-b649-e59f16dc4167" (UID: "6e4096eb-c9d6-48b8-b649-e59f16dc4167"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:44:16 crc kubenswrapper[4711]: I0123 08:44:16.791771 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e4096eb-c9d6-48b8-b649-e59f16dc4167-kube-api-access-429l5" (OuterVolumeSpecName: "kube-api-access-429l5") pod "6e4096eb-c9d6-48b8-b649-e59f16dc4167" (UID: "6e4096eb-c9d6-48b8-b649-e59f16dc4167"). InnerVolumeSpecName "kube-api-access-429l5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:44:16 crc kubenswrapper[4711]: I0123 08:44:16.888032 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e4096eb-c9d6-48b8-b649-e59f16dc4167-utilities\") on node \"crc\" DevicePath \"\"" Jan 23 08:44:16 crc kubenswrapper[4711]: I0123 08:44:16.889281 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-429l5\" (UniqueName: \"kubernetes.io/projected/6e4096eb-c9d6-48b8-b649-e59f16dc4167-kube-api-access-429l5\") on node \"crc\" DevicePath \"\"" Jan 23 08:44:16 crc kubenswrapper[4711]: I0123 08:44:16.909081 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e4096eb-c9d6-48b8-b649-e59f16dc4167-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6e4096eb-c9d6-48b8-b649-e59f16dc4167" (UID: "6e4096eb-c9d6-48b8-b649-e59f16dc4167"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:44:16 crc kubenswrapper[4711]: I0123 08:44:16.990407 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e4096eb-c9d6-48b8-b649-e59f16dc4167-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.283374 4711 generic.go:334] "Generic (PLEG): container finished" podID="6e4096eb-c9d6-48b8-b649-e59f16dc4167" containerID="ebb8ef7bc6790fc62c38008114a2ab64ba530bb49ecc0335e52ee293a008d45c" exitCode=0 Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.283467 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hmqvm" Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.283475 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hmqvm" event={"ID":"6e4096eb-c9d6-48b8-b649-e59f16dc4167","Type":"ContainerDied","Data":"ebb8ef7bc6790fc62c38008114a2ab64ba530bb49ecc0335e52ee293a008d45c"} Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.283575 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hmqvm" event={"ID":"6e4096eb-c9d6-48b8-b649-e59f16dc4167","Type":"ContainerDied","Data":"4f5dba449a440ea3f49ebee6953495dd7a5c71961d72890cf9039cb1d2a51543"} Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.283607 4711 scope.go:117] "RemoveContainer" containerID="ebb8ef7bc6790fc62c38008114a2ab64ba530bb49ecc0335e52ee293a008d45c" Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.309981 4711 scope.go:117] "RemoveContainer" containerID="15b192077714ca994ffb897cce3550b66d27610c5e5ed17688616bc90745d77c" Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.351634 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hmqvm"] Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.354724 4711 scope.go:117] "RemoveContainer" containerID="b769b7be5aae9dffa0cfbd814c529fc00d6626207149d296fe1f6698b0f7c38c" Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.363340 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hmqvm"] Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.379940 4711 scope.go:117] "RemoveContainer" containerID="ebb8ef7bc6790fc62c38008114a2ab64ba530bb49ecc0335e52ee293a008d45c" Jan 23 08:44:17 crc kubenswrapper[4711]: E0123 08:44:17.390655 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ebb8ef7bc6790fc62c38008114a2ab64ba530bb49ecc0335e52ee293a008d45c\": container with ID starting with ebb8ef7bc6790fc62c38008114a2ab64ba530bb49ecc0335e52ee293a008d45c not found: ID does not exist" containerID="ebb8ef7bc6790fc62c38008114a2ab64ba530bb49ecc0335e52ee293a008d45c" Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.390703 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ebb8ef7bc6790fc62c38008114a2ab64ba530bb49ecc0335e52ee293a008d45c"} err="failed to get container status \"ebb8ef7bc6790fc62c38008114a2ab64ba530bb49ecc0335e52ee293a008d45c\": rpc error: code = NotFound desc = could not find container \"ebb8ef7bc6790fc62c38008114a2ab64ba530bb49ecc0335e52ee293a008d45c\": container with ID starting with ebb8ef7bc6790fc62c38008114a2ab64ba530bb49ecc0335e52ee293a008d45c not found: ID does not exist" Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.390728 4711 scope.go:117] "RemoveContainer" containerID="15b192077714ca994ffb897cce3550b66d27610c5e5ed17688616bc90745d77c" Jan 23 08:44:17 crc kubenswrapper[4711]: E0123 08:44:17.391241 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15b192077714ca994ffb897cce3550b66d27610c5e5ed17688616bc90745d77c\": container with ID starting with 15b192077714ca994ffb897cce3550b66d27610c5e5ed17688616bc90745d77c not found: ID does not exist" containerID="15b192077714ca994ffb897cce3550b66d27610c5e5ed17688616bc90745d77c" Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.391295 4711 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15b192077714ca994ffb897cce3550b66d27610c5e5ed17688616bc90745d77c"} err="failed to get container status \"15b192077714ca994ffb897cce3550b66d27610c5e5ed17688616bc90745d77c\": rpc error: code = NotFound desc = could not find container \"15b192077714ca994ffb897cce3550b66d27610c5e5ed17688616bc90745d77c\": container with ID starting with 15b192077714ca994ffb897cce3550b66d27610c5e5ed17688616bc90745d77c not found: ID does not exist" Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.391327 4711 scope.go:117] "RemoveContainer" containerID="b769b7be5aae9dffa0cfbd814c529fc00d6626207149d296fe1f6698b0f7c38c" Jan 23 08:44:17 crc kubenswrapper[4711]: E0123 08:44:17.392082 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b769b7be5aae9dffa0cfbd814c529fc00d6626207149d296fe1f6698b0f7c38c\": container with ID starting with b769b7be5aae9dffa0cfbd814c529fc00d6626207149d296fe1f6698b0f7c38c not found: ID does not exist" containerID="b769b7be5aae9dffa0cfbd814c529fc00d6626207149d296fe1f6698b0f7c38c" Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.392152 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b769b7be5aae9dffa0cfbd814c529fc00d6626207149d296fe1f6698b0f7c38c"} err="failed to get container status \"b769b7be5aae9dffa0cfbd814c529fc00d6626207149d296fe1f6698b0f7c38c\": rpc error: code = NotFound desc = could not find container \"b769b7be5aae9dffa0cfbd814c529fc00d6626207149d296fe1f6698b0f7c38c\": container with ID starting with b769b7be5aae9dffa0cfbd814c529fc00d6626207149d296fe1f6698b0f7c38c not found: ID does not exist" Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.487648 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e4096eb-c9d6-48b8-b649-e59f16dc4167" path="/var/lib/kubelet/pods/6e4096eb-c9d6-48b8-b649-e59f16dc4167/volumes" Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.584436 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m" Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.700620 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d169e211-d958-49c3-b47c-8aaa334f37b8-config-data\") pod \"d169e211-d958-49c3-b47c-8aaa334f37b8\" (UID: \"d169e211-d958-49c3-b47c-8aaa334f37b8\") " Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.700675 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d169e211-d958-49c3-b47c-8aaa334f37b8-scripts\") pod \"d169e211-d958-49c3-b47c-8aaa334f37b8\" (UID: \"d169e211-d958-49c3-b47c-8aaa334f37b8\") " Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.700763 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfv2r\" (UniqueName: \"kubernetes.io/projected/d169e211-d958-49c3-b47c-8aaa334f37b8-kube-api-access-tfv2r\") pod \"d169e211-d958-49c3-b47c-8aaa334f37b8\" (UID: \"d169e211-d958-49c3-b47c-8aaa334f37b8\") " Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.705286 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d169e211-d958-49c3-b47c-8aaa334f37b8-scripts" (OuterVolumeSpecName: "scripts") pod "d169e211-d958-49c3-b47c-8aaa334f37b8" (UID: "d169e211-d958-49c3-b47c-8aaa334f37b8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.705320 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d169e211-d958-49c3-b47c-8aaa334f37b8-kube-api-access-tfv2r" (OuterVolumeSpecName: "kube-api-access-tfv2r") pod "d169e211-d958-49c3-b47c-8aaa334f37b8" (UID: "d169e211-d958-49c3-b47c-8aaa334f37b8"). InnerVolumeSpecName "kube-api-access-tfv2r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.727364 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d169e211-d958-49c3-b47c-8aaa334f37b8-config-data" (OuterVolumeSpecName: "config-data") pod "d169e211-d958-49c3-b47c-8aaa334f37b8" (UID: "d169e211-d958-49c3-b47c-8aaa334f37b8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.802787 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d169e211-d958-49c3-b47c-8aaa334f37b8-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.802826 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfv2r\" (UniqueName: \"kubernetes.io/projected/d169e211-d958-49c3-b47c-8aaa334f37b8-kube-api-access-tfv2r\") on node \"crc\" DevicePath \"\"" Jan 23 08:44:17 crc kubenswrapper[4711]: I0123 08:44:17.802841 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d169e211-d958-49c3-b47c-8aaa334f37b8-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:44:18 crc kubenswrapper[4711]: I0123 08:44:18.293768 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m" event={"ID":"d169e211-d958-49c3-b47c-8aaa334f37b8","Type":"ContainerDied","Data":"56fb2b53d1524cfde24a287f07438d7134a7066a67d27544437069f85a5781d5"} Jan 23 08:44:18 crc kubenswrapper[4711]: I0123 08:44:18.294066 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="56fb2b53d1524cfde24a287f07438d7134a7066a67d27544437069f85a5781d5" Jan 23 08:44:18 crc kubenswrapper[4711]: I0123 08:44:18.293806 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m" Jan 23 08:44:18 crc kubenswrapper[4711]: I0123 08:44:18.407409 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:44:18 crc kubenswrapper[4711]: E0123 08:44:18.407886 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d169e211-d958-49c3-b47c-8aaa334f37b8" containerName="nova-kuttl-cell0-conductor-db-sync" Jan 23 08:44:18 crc kubenswrapper[4711]: I0123 08:44:18.407906 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d169e211-d958-49c3-b47c-8aaa334f37b8" containerName="nova-kuttl-cell0-conductor-db-sync" Jan 23 08:44:18 crc kubenswrapper[4711]: E0123 08:44:18.407933 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e4096eb-c9d6-48b8-b649-e59f16dc4167" containerName="registry-server" Jan 23 08:44:18 crc kubenswrapper[4711]: I0123 08:44:18.407941 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e4096eb-c9d6-48b8-b649-e59f16dc4167" containerName="registry-server" Jan 23 08:44:18 crc kubenswrapper[4711]: E0123 08:44:18.407960 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e4096eb-c9d6-48b8-b649-e59f16dc4167" containerName="extract-utilities" Jan 23 08:44:18 crc kubenswrapper[4711]: I0123 08:44:18.407970 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e4096eb-c9d6-48b8-b649-e59f16dc4167" containerName="extract-utilities" Jan 23 08:44:18 crc kubenswrapper[4711]: E0123 08:44:18.407983 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e4096eb-c9d6-48b8-b649-e59f16dc4167" containerName="extract-content" Jan 23 08:44:18 crc kubenswrapper[4711]: I0123 08:44:18.407990 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e4096eb-c9d6-48b8-b649-e59f16dc4167" containerName="extract-content" Jan 23 08:44:18 crc kubenswrapper[4711]: I0123 08:44:18.408195 4711 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="6e4096eb-c9d6-48b8-b649-e59f16dc4167" containerName="registry-server" Jan 23 08:44:18 crc kubenswrapper[4711]: I0123 08:44:18.408222 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d169e211-d958-49c3-b47c-8aaa334f37b8" containerName="nova-kuttl-cell0-conductor-db-sync" Jan 23 08:44:18 crc kubenswrapper[4711]: I0123 08:44:18.410711 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:44:18 crc kubenswrapper[4711]: I0123 08:44:18.411022 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59ac78e6-c45e-42a0-b959-d1225d2e7d44-config-data\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"59ac78e6-c45e-42a0-b959-d1225d2e7d44\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:44:18 crc kubenswrapper[4711]: I0123 08:44:18.411062 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g46wf\" (UniqueName: \"kubernetes.io/projected/59ac78e6-c45e-42a0-b959-d1225d2e7d44-kube-api-access-g46wf\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"59ac78e6-c45e-42a0-b959-d1225d2e7d44\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:44:18 crc kubenswrapper[4711]: I0123 08:44:18.415646 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-conductor-config-data" Jan 23 08:44:18 crc kubenswrapper[4711]: I0123 08:44:18.415824 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-nova-kuttl-dockercfg-6jltd" Jan 23 08:44:18 crc kubenswrapper[4711]: I0123 08:44:18.422614 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:44:18 crc kubenswrapper[4711]: I0123 08:44:18.513788 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59ac78e6-c45e-42a0-b959-d1225d2e7d44-config-data\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"59ac78e6-c45e-42a0-b959-d1225d2e7d44\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:44:18 crc kubenswrapper[4711]: I0123 08:44:18.513841 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g46wf\" (UniqueName: \"kubernetes.io/projected/59ac78e6-c45e-42a0-b959-d1225d2e7d44-kube-api-access-g46wf\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"59ac78e6-c45e-42a0-b959-d1225d2e7d44\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:44:18 crc kubenswrapper[4711]: I0123 08:44:18.519669 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59ac78e6-c45e-42a0-b959-d1225d2e7d44-config-data\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"59ac78e6-c45e-42a0-b959-d1225d2e7d44\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:44:18 crc kubenswrapper[4711]: I0123 08:44:18.534342 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g46wf\" (UniqueName: \"kubernetes.io/projected/59ac78e6-c45e-42a0-b959-d1225d2e7d44-kube-api-access-g46wf\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"59ac78e6-c45e-42a0-b959-d1225d2e7d44\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:44:18 crc kubenswrapper[4711]: I0123 08:44:18.727123 4711 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:44:19 crc kubenswrapper[4711]: I0123 08:44:19.178538 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:44:19 crc kubenswrapper[4711]: W0123 08:44:19.182683 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59ac78e6_c45e_42a0_b959_d1225d2e7d44.slice/crio-49632a46baaa3484224380653ba747b58fb6a0074a055e534c8478c6d5342037 WatchSource:0}: Error finding container 49632a46baaa3484224380653ba747b58fb6a0074a055e534c8478c6d5342037: Status 404 returned error can't find the container with id 49632a46baaa3484224380653ba747b58fb6a0074a055e534c8478c6d5342037 Jan 23 08:44:19 crc kubenswrapper[4711]: I0123 08:44:19.301991 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" event={"ID":"59ac78e6-c45e-42a0-b959-d1225d2e7d44","Type":"ContainerStarted","Data":"49632a46baaa3484224380653ba747b58fb6a0074a055e534c8478c6d5342037"} Jan 23 08:44:20 crc kubenswrapper[4711]: I0123 08:44:20.313111 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" event={"ID":"59ac78e6-c45e-42a0-b959-d1225d2e7d44","Type":"ContainerStarted","Data":"62133fb77de90361a06c3a83ae4713777b1f4f078b2eb9b8b7ebd4965765da9a"} Jan 23 08:44:20 crc kubenswrapper[4711]: I0123 08:44:20.313540 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:44:20 crc kubenswrapper[4711]: I0123 08:44:20.329805 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" podStartSLOduration=2.329785737 podStartE2EDuration="2.329785737s" podCreationTimestamp="2026-01-23 08:44:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:44:20.327655085 +0000 UTC m=+1445.900611453" watchObservedRunningTime="2026-01-23 08:44:20.329785737 +0000 UTC m=+1445.902742105" Jan 23 08:44:26 crc kubenswrapper[4711]: I0123 08:44:25.994822 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:44:26 crc kubenswrapper[4711]: I0123 08:44:25.995646 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:44:28 crc kubenswrapper[4711]: I0123 08:44:28.760216 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.352486 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr"] Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.353628 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.360756 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-manage-config-data" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.360997 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-manage-scripts" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.368079 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr"] Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.490724 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41acc8d8-39b4-4f8f-bcf1-8cb5df438be7-scripts\") pod \"nova-kuttl-cell0-cell-mapping-nljvr\" (UID: \"41acc8d8-39b4-4f8f-bcf1-8cb5df438be7\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.490833 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzwjr\" (UniqueName: \"kubernetes.io/projected/41acc8d8-39b4-4f8f-bcf1-8cb5df438be7-kube-api-access-pzwjr\") pod \"nova-kuttl-cell0-cell-mapping-nljvr\" (UID: \"41acc8d8-39b4-4f8f-bcf1-8cb5df438be7\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.490896 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41acc8d8-39b4-4f8f-bcf1-8cb5df438be7-config-data\") pod \"nova-kuttl-cell0-cell-mapping-nljvr\" (UID: \"41acc8d8-39b4-4f8f-bcf1-8cb5df438be7\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.594361 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41acc8d8-39b4-4f8f-bcf1-8cb5df438be7-scripts\") pod \"nova-kuttl-cell0-cell-mapping-nljvr\" (UID: \"41acc8d8-39b4-4f8f-bcf1-8cb5df438be7\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.594435 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzwjr\" (UniqueName: \"kubernetes.io/projected/41acc8d8-39b4-4f8f-bcf1-8cb5df438be7-kube-api-access-pzwjr\") pod \"nova-kuttl-cell0-cell-mapping-nljvr\" (UID: \"41acc8d8-39b4-4f8f-bcf1-8cb5df438be7\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.596536 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41acc8d8-39b4-4f8f-bcf1-8cb5df438be7-config-data\") pod \"nova-kuttl-cell0-cell-mapping-nljvr\" (UID: \"41acc8d8-39b4-4f8f-bcf1-8cb5df438be7\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.599210 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.600997 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.602905 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41acc8d8-39b4-4f8f-bcf1-8cb5df438be7-scripts\") pod \"nova-kuttl-cell0-cell-mapping-nljvr\" (UID: \"41acc8d8-39b4-4f8f-bcf1-8cb5df438be7\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.603949 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41acc8d8-39b4-4f8f-bcf1-8cb5df438be7-config-data\") pod \"nova-kuttl-cell0-cell-mapping-nljvr\" (UID: \"41acc8d8-39b4-4f8f-bcf1-8cb5df438be7\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.617881 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-api-config-data" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.623694 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.634581 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzwjr\" (UniqueName: \"kubernetes.io/projected/41acc8d8-39b4-4f8f-bcf1-8cb5df438be7-kube-api-access-pzwjr\") pod \"nova-kuttl-cell0-cell-mapping-nljvr\" (UID: \"41acc8d8-39b4-4f8f-bcf1-8cb5df438be7\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.672212 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.684471 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.685817 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.687855 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-scheduler-config-data" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.727717 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.742536 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.744229 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.747900 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-metadata-config-data" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.792332 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.799460 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7e3b82b-d747-491e-895f-1bbea9463e1b-logs\") pod \"nova-kuttl-api-0\" (UID: \"c7e3b82b-d747-491e-895f-1bbea9463e1b\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.799527 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clw7g\" (UniqueName: \"kubernetes.io/projected/c7e3b82b-d747-491e-895f-1bbea9463e1b-kube-api-access-clw7g\") pod \"nova-kuttl-api-0\" (UID: \"c7e3b82b-d747-491e-895f-1bbea9463e1b\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.799587 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7e3b82b-d747-491e-895f-1bbea9463e1b-config-data\") pod \"nova-kuttl-api-0\" (UID: \"c7e3b82b-d747-491e-895f-1bbea9463e1b\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.799612 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vm8m\" (UniqueName: \"kubernetes.io/projected/8fc47765-a9d2-4df5-9f64-4d0346f86594-kube-api-access-5vm8m\") pod \"nova-kuttl-scheduler-0\" (UID: \"8fc47765-a9d2-4df5-9f64-4d0346f86594\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.799638 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fc47765-a9d2-4df5-9f64-4d0346f86594-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"8fc47765-a9d2-4df5-9f64-4d0346f86594\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.800292 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"] Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.801196 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.804064 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-novncproxy-config-data" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.826558 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"] Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.900481 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6n7mf\" (UniqueName: \"kubernetes.io/projected/bc7fd5a9-ac7c-466a-8e30-b046ef1839a4-kube-api-access-6n7mf\") pod \"nova-kuttl-metadata-0\" (UID: \"bc7fd5a9-ac7c-466a-8e30-b046ef1839a4\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.900580 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7e3b82b-d747-491e-895f-1bbea9463e1b-logs\") pod \"nova-kuttl-api-0\" (UID: \"c7e3b82b-d747-491e-895f-1bbea9463e1b\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.900612 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc7fd5a9-ac7c-466a-8e30-b046ef1839a4-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"bc7fd5a9-ac7c-466a-8e30-b046ef1839a4\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.900683 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc7fd5a9-ac7c-466a-8e30-b046ef1839a4-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"bc7fd5a9-ac7c-466a-8e30-b046ef1839a4\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.900709 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vm8m\" (UniqueName: \"kubernetes.io/projected/8fc47765-a9d2-4df5-9f64-4d0346f86594-kube-api-access-5vm8m\") pod \"nova-kuttl-scheduler-0\" (UID: \"8fc47765-a9d2-4df5-9f64-4d0346f86594\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.900739 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fc47765-a9d2-4df5-9f64-4d0346f86594-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"8fc47765-a9d2-4df5-9f64-4d0346f86594\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.900786 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clw7g\" (UniqueName: \"kubernetes.io/projected/c7e3b82b-d747-491e-895f-1bbea9463e1b-kube-api-access-clw7g\") pod \"nova-kuttl-api-0\" (UID: \"c7e3b82b-d747-491e-895f-1bbea9463e1b\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.900829 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7e3b82b-d747-491e-895f-1bbea9463e1b-config-data\") pod \"nova-kuttl-api-0\" (UID: \"c7e3b82b-d747-491e-895f-1bbea9463e1b\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:44:29 crc 
Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.909085 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fc47765-a9d2-4df5-9f64-4d0346f86594-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"8fc47765-a9d2-4df5-9f64-4d0346f86594\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.909161 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7e3b82b-d747-491e-895f-1bbea9463e1b-config-data\") pod \"nova-kuttl-api-0\" (UID: \"c7e3b82b-d747-491e-895f-1bbea9463e1b\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.922973 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vm8m\" (UniqueName: \"kubernetes.io/projected/8fc47765-a9d2-4df5-9f64-4d0346f86594-kube-api-access-5vm8m\") pod \"nova-kuttl-scheduler-0\" (UID: \"8fc47765-a9d2-4df5-9f64-4d0346f86594\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:44:29 crc kubenswrapper[4711]: I0123 08:44:29.927907 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clw7g\" (UniqueName: \"kubernetes.io/projected/c7e3b82b-d747-491e-895f-1bbea9463e1b-kube-api-access-clw7g\") pod \"nova-kuttl-api-0\" (UID: \"c7e3b82b-d747-491e-895f-1bbea9463e1b\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.001679 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b9edb96-ed68-4de0-85a1-e40bd22b63b1-config-data\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"2b9edb96-ed68-4de0-85a1-e40bd22b63b1\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.001723 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6n7mf\" (UniqueName: \"kubernetes.io/projected/bc7fd5a9-ac7c-466a-8e30-b046ef1839a4-kube-api-access-6n7mf\") pod \"nova-kuttl-metadata-0\" (UID: \"bc7fd5a9-ac7c-466a-8e30-b046ef1839a4\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.001764 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98wk2\" (UniqueName: \"kubernetes.io/projected/2b9edb96-ed68-4de0-85a1-e40bd22b63b1-kube-api-access-98wk2\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"2b9edb96-ed68-4de0-85a1-e40bd22b63b1\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.002303 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc7fd5a9-ac7c-466a-8e30-b046ef1839a4-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"bc7fd5a9-ac7c-466a-8e30-b046ef1839a4\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.002465 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc7fd5a9-ac7c-466a-8e30-b046ef1839a4-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"bc7fd5a9-ac7c-466a-8e30-b046ef1839a4\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.002962 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc7fd5a9-ac7c-466a-8e30-b046ef1839a4-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"bc7fd5a9-ac7c-466a-8e30-b046ef1839a4\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.005780 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc7fd5a9-ac7c-466a-8e30-b046ef1839a4-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"bc7fd5a9-ac7c-466a-8e30-b046ef1839a4\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.017992 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6n7mf\" (UniqueName: \"kubernetes.io/projected/bc7fd5a9-ac7c-466a-8e30-b046ef1839a4-kube-api-access-6n7mf\") pod \"nova-kuttl-metadata-0\" (UID: \"bc7fd5a9-ac7c-466a-8e30-b046ef1839a4\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.060189 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.076073 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.086033 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.108611 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b9edb96-ed68-4de0-85a1-e40bd22b63b1-config-data\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"2b9edb96-ed68-4de0-85a1-e40bd22b63b1\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.108697 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98wk2\" (UniqueName: \"kubernetes.io/projected/2b9edb96-ed68-4de0-85a1-e40bd22b63b1-kube-api-access-98wk2\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"2b9edb96-ed68-4de0-85a1-e40bd22b63b1\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.119458 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b9edb96-ed68-4de0-85a1-e40bd22b63b1-config-data\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"2b9edb96-ed68-4de0-85a1-e40bd22b63b1\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.126660 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98wk2\" (UniqueName: \"kubernetes.io/projected/2b9edb96-ed68-4de0-85a1-e40bd22b63b1-kube-api-access-98wk2\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"2b9edb96-ed68-4de0-85a1-e40bd22b63b1\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.257366 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr"]
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.337125 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2"]
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.338924 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.343211 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2"]
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.343903 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-conductor-scripts"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.344218 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-conductor-config-data"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.412151 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8v2v\" (UniqueName: \"kubernetes.io/projected/9e6e254a-f600-44ae-8925-6ad10a1220e5-kube-api-access-w8v2v\") pod \"nova-kuttl-cell1-conductor-db-sync-w8xz2\" (UID: \"9e6e254a-f600-44ae-8925-6ad10a1220e5\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.412226 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e6e254a-f600-44ae-8925-6ad10a1220e5-config-data\") pod \"nova-kuttl-cell1-conductor-db-sync-w8xz2\" (UID: \"9e6e254a-f600-44ae-8925-6ad10a1220e5\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.412291 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e6e254a-f600-44ae-8925-6ad10a1220e5-scripts\") pod \"nova-kuttl-cell1-conductor-db-sync-w8xz2\" (UID: \"9e6e254a-f600-44ae-8925-6ad10a1220e5\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.421104 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.445135 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr" event={"ID":"41acc8d8-39b4-4f8f-bcf1-8cb5df438be7","Type":"ContainerStarted","Data":"f08642169a892a4e6bf734388a28d02949d7af46e34652ec665560a0b10fd993"}
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.445177 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr" event={"ID":"41acc8d8-39b4-4f8f-bcf1-8cb5df438be7","Type":"ContainerStarted","Data":"0c88ef53e6a5c529a335f8b737db530630dc810b96715ca24438c77fe4d2290c"}
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.507558 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"]
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.514027 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8v2v\" (UniqueName: \"kubernetes.io/projected/9e6e254a-f600-44ae-8925-6ad10a1220e5-kube-api-access-w8v2v\") pod \"nova-kuttl-cell1-conductor-db-sync-w8xz2\" (UID: \"9e6e254a-f600-44ae-8925-6ad10a1220e5\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.514092 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e6e254a-f600-44ae-8925-6ad10a1220e5-config-data\") pod \"nova-kuttl-cell1-conductor-db-sync-w8xz2\" (UID: \"9e6e254a-f600-44ae-8925-6ad10a1220e5\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.514145 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e6e254a-f600-44ae-8925-6ad10a1220e5-scripts\") pod \"nova-kuttl-cell1-conductor-db-sync-w8xz2\" (UID: \"9e6e254a-f600-44ae-8925-6ad10a1220e5\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.517665 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e6e254a-f600-44ae-8925-6ad10a1220e5-scripts\") pod \"nova-kuttl-cell1-conductor-db-sync-w8xz2\" (UID: \"9e6e254a-f600-44ae-8925-6ad10a1220e5\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.517822 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e6e254a-f600-44ae-8925-6ad10a1220e5-config-data\") pod \"nova-kuttl-cell1-conductor-db-sync-w8xz2\" (UID: \"9e6e254a-f600-44ae-8925-6ad10a1220e5\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2"
Jan 23 08:44:30 crc kubenswrapper[4711]: W0123 08:44:30.522036 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7e3b82b_d747_491e_895f_1bbea9463e1b.slice/crio-88322d7a3bd2997df475694967599722255c465969b12a56093da1b32061aa17 WatchSource:0}: Error finding container 88322d7a3bd2997df475694967599722255c465969b12a56093da1b32061aa17: Status 404 returned error can't find the container with id 88322d7a3bd2997df475694967599722255c465969b12a56093da1b32061aa17
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.531304 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8v2v\" (UniqueName: \"kubernetes.io/projected/9e6e254a-f600-44ae-8925-6ad10a1220e5-kube-api-access-w8v2v\") pod \"nova-kuttl-cell1-conductor-db-sync-w8xz2\" (UID: \"9e6e254a-f600-44ae-8925-6ad10a1220e5\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2"
Jan 23 08:44:30 crc kubenswrapper[4711]: I0123 08:44:30.671658 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2"
Jan 23 08:44:31 crc kubenswrapper[4711]: I0123 08:44:31.433436 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"]
Jan 23 08:44:31 crc kubenswrapper[4711]: I0123 08:44:31.485743 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"c7e3b82b-d747-491e-895f-1bbea9463e1b","Type":"ContainerStarted","Data":"88322d7a3bd2997df475694967599722255c465969b12a56093da1b32061aa17"}
Jan 23 08:44:31 crc kubenswrapper[4711]: I0123 08:44:31.485795 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"]
Jan 23 08:44:31 crc kubenswrapper[4711]: I0123 08:44:31.493323 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"]
Jan 23 08:44:31 crc kubenswrapper[4711]: W0123 08:44:31.502204 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8fc47765_a9d2_4df5_9f64_4d0346f86594.slice/crio-8b597303918db81c275a97a7eeb92a38be792d1a509b80290db66760cd13ea3e WatchSource:0}: Error finding container 8b597303918db81c275a97a7eeb92a38be792d1a509b80290db66760cd13ea3e: Status 404 returned error can't find the container with id 8b597303918db81c275a97a7eeb92a38be792d1a509b80290db66760cd13ea3e
Jan 23 08:44:31 crc kubenswrapper[4711]: W0123 08:44:31.505143 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbc7fd5a9_ac7c_466a_8e30_b046ef1839a4.slice/crio-4225ebe0857970ee2226aa2164081074e38bed4bcd55ede0cd2fd8e2000fb11c WatchSource:0}: Error finding container 4225ebe0857970ee2226aa2164081074e38bed4bcd55ede0cd2fd8e2000fb11c: Status 404 returned error can't find the container with id 4225ebe0857970ee2226aa2164081074e38bed4bcd55ede0cd2fd8e2000fb11c
Jan 23 08:44:31 crc kubenswrapper[4711]: I0123 08:44:31.509747 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr" podStartSLOduration=2.509722959 podStartE2EDuration="2.509722959s" podCreationTimestamp="2026-01-23 08:44:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:44:31.487568979 +0000 UTC m=+1457.060525347" watchObservedRunningTime="2026-01-23 08:44:31.509722959 +0000 UTC m=+1457.082679337"
Jan 23 08:44:31 crc kubenswrapper[4711]: I0123 08:44:31.809179 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2"]
Jan 23 08:44:31 crc kubenswrapper[4711]: W0123 08:44:31.818458 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e6e254a_f600_44ae_8925_6ad10a1220e5.slice/crio-1172873c3404f589021becfd199c9b61807e0680765cfa2f41a5d3c09f030d14 WatchSource:0}: Error finding container 1172873c3404f589021becfd199c9b61807e0680765cfa2f41a5d3c09f030d14: Status 404 returned error can't find the container with id 1172873c3404f589021becfd199c9b61807e0680765cfa2f41a5d3c09f030d14
Jan 23 08:44:32 crc kubenswrapper[4711]: I0123 08:44:32.487955 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2" event={"ID":"9e6e254a-f600-44ae-8925-6ad10a1220e5","Type":"ContainerStarted","Data":"3542ccc41defdb0e51d72c252143be1bc5895c650d6374de75785663334df6c9"}
Jan 23 08:44:32 crc kubenswrapper[4711]: I0123 08:44:32.488321 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2" event={"ID":"9e6e254a-f600-44ae-8925-6ad10a1220e5","Type":"ContainerStarted","Data":"1172873c3404f589021becfd199c9b61807e0680765cfa2f41a5d3c09f030d14"}
Jan 23 08:44:32 crc kubenswrapper[4711]: I0123 08:44:32.490783 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" event={"ID":"2b9edb96-ed68-4de0-85a1-e40bd22b63b1","Type":"ContainerStarted","Data":"438185acd24559f07962f610b18cb0ef57659ecf70295317aa623416f63f601c"}
Jan 23 08:44:32 crc kubenswrapper[4711]: I0123 08:44:32.492023 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"bc7fd5a9-ac7c-466a-8e30-b046ef1839a4","Type":"ContainerStarted","Data":"4225ebe0857970ee2226aa2164081074e38bed4bcd55ede0cd2fd8e2000fb11c"}
Jan 23 08:44:32 crc kubenswrapper[4711]: I0123 08:44:32.493099 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"8fc47765-a9d2-4df5-9f64-4d0346f86594","Type":"ContainerStarted","Data":"8b597303918db81c275a97a7eeb92a38be792d1a509b80290db66760cd13ea3e"}
Jan 23 08:44:33 crc kubenswrapper[4711]: I0123 08:44:33.503029 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"bc7fd5a9-ac7c-466a-8e30-b046ef1839a4","Type":"ContainerStarted","Data":"789c6e336ec40bbc23b5e3d4246ebd6e9c8f673cf72627523fa6005d2fa8aed7"}
Jan 23 08:44:33 crc kubenswrapper[4711]: I0123 08:44:33.503098 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"bc7fd5a9-ac7c-466a-8e30-b046ef1839a4","Type":"ContainerStarted","Data":"2bbc68d46c105e750888c01e8b7e987c88b4cc753c53928063c874d47b835485"}
Jan 23 08:44:33 crc kubenswrapper[4711]: I0123 08:44:33.504867 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"c7e3b82b-d747-491e-895f-1bbea9463e1b","Type":"ContainerStarted","Data":"f00c2481e10c8a512f80aee97ea1dbe4fa1de4fb809383f84543970b93ba1365"}
Jan 23 08:44:33 crc kubenswrapper[4711]: I0123 08:44:33.504907 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"c7e3b82b-d747-491e-895f-1bbea9463e1b","Type":"ContainerStarted","Data":"b6cbbb56eb21a181615bbfee71a58ca4cb01f10ddd7de3f2a3c0dad57e65dc56"}
Jan 23 08:44:34 crc kubenswrapper[4711]: I0123 08:44:34.544405 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2" podStartSLOduration=4.544378511 podStartE2EDuration="4.544378511s" podCreationTimestamp="2026-01-23 08:44:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:44:32.512977681 +0000 UTC m=+1458.085934049" watchObservedRunningTime="2026-01-23 08:44:34.544378511 +0000 UTC m=+1460.117334889"
Jan 23 08:44:34 crc kubenswrapper[4711]: I0123 08:44:34.562466 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-api-0" podStartSLOduration=3.422112299 podStartE2EDuration="5.562369579s" podCreationTimestamp="2026-01-23 08:44:29 +0000 UTC" firstStartedPulling="2026-01-23 08:44:30.528118724 +0000 UTC m=+1456.101075082" lastFinishedPulling="2026-01-23 08:44:32.668376004 +0000 UTC m=+1458.241332362" observedRunningTime="2026-01-23 08:44:34.561146899 +0000 UTC m=+1460.134103287" watchObservedRunningTime="2026-01-23 08:44:34.562369579 +0000 UTC m=+1460.135325957"
Jan 23 08:44:34 crc kubenswrapper[4711]: I0123 08:44:34.570112 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-metadata-0" podStartSLOduration=4.382496198 podStartE2EDuration="5.570093747s" podCreationTimestamp="2026-01-23 08:44:29 +0000 UTC" firstStartedPulling="2026-01-23 08:44:31.516191497 +0000 UTC m=+1457.089147865" lastFinishedPulling="2026-01-23 08:44:32.703789046 +0000 UTC m=+1458.276745414" observedRunningTime="2026-01-23 08:44:34.539715668 +0000 UTC m=+1460.112672036" watchObservedRunningTime="2026-01-23 08:44:34.570093747 +0000 UTC m=+1460.143050125"
Jan 23 08:44:35 crc kubenswrapper[4711]: I0123 08:44:35.087486 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:35 crc kubenswrapper[4711]: I0123 08:44:35.087899 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:35 crc kubenswrapper[4711]: I0123 08:44:35.528850 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" event={"ID":"2b9edb96-ed68-4de0-85a1-e40bd22b63b1","Type":"ContainerStarted","Data":"1b59c5a45bec5d7be1d5919c52c8fb4fa3ee4e5bbf942fc6e799cf8c0f8112e2"}
Jan 23 08:44:35 crc kubenswrapper[4711]: I0123 08:44:35.536447 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"8fc47765-a9d2-4df5-9f64-4d0346f86594","Type":"ContainerStarted","Data":"fb479e5e28c755921e88be268a5cdb5fb88cd7971f280d082c3d1ba82319f1b7"}
Jan 23 08:44:35 crc kubenswrapper[4711]: I0123 08:44:35.563346 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podStartSLOduration=3.413229922 podStartE2EDuration="6.563323325s" podCreationTimestamp="2026-01-23 08:44:29 +0000 UTC" firstStartedPulling="2026-01-23 08:44:31.516891613 +0000 UTC m=+1457.089847981" lastFinishedPulling="2026-01-23 08:44:34.666985016 +0000 UTC m=+1460.239941384" observedRunningTime="2026-01-23 08:44:35.560984158 +0000 UTC m=+1461.133940536" watchObservedRunningTime="2026-01-23 08:44:35.563323325 +0000 UTC m=+1461.136279713"
Jan 23 08:44:35 crc kubenswrapper[4711]: I0123 08:44:35.566968 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" podStartSLOduration=3.402164594 podStartE2EDuration="6.566958104s" podCreationTimestamp="2026-01-23 08:44:29 +0000 UTC" firstStartedPulling="2026-01-23 08:44:31.498022354 +0000 UTC m=+1457.070978722" lastFinishedPulling="2026-01-23 08:44:34.662815854 +0000 UTC m=+1460.235772232" observedRunningTime="2026-01-23 08:44:35.547993902 +0000 UTC m=+1461.120950290" watchObservedRunningTime="2026-01-23 08:44:35.566958104 +0000 UTC m=+1461.139914482"
Jan 23 08:44:38 crc kubenswrapper[4711]: I0123 08:44:38.567806 4711 generic.go:334] "Generic (PLEG): container finished" podID="41acc8d8-39b4-4f8f-bcf1-8cb5df438be7" containerID="f08642169a892a4e6bf734388a28d02949d7af46e34652ec665560a0b10fd993" exitCode=0
Jan 23 08:44:38 crc kubenswrapper[4711]: I0123 08:44:38.567948 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr" event={"ID":"41acc8d8-39b4-4f8f-bcf1-8cb5df438be7","Type":"ContainerDied","Data":"f08642169a892a4e6bf734388a28d02949d7af46e34652ec665560a0b10fd993"}
Jan 23 08:44:39 crc kubenswrapper[4711]: I0123 08:44:39.888135 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr"
Jan 23 08:44:39 crc kubenswrapper[4711]: I0123 08:44:39.987312 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzwjr\" (UniqueName: \"kubernetes.io/projected/41acc8d8-39b4-4f8f-bcf1-8cb5df438be7-kube-api-access-pzwjr\") pod \"41acc8d8-39b4-4f8f-bcf1-8cb5df438be7\" (UID: \"41acc8d8-39b4-4f8f-bcf1-8cb5df438be7\") "
Jan 23 08:44:39 crc kubenswrapper[4711]: I0123 08:44:39.987451 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41acc8d8-39b4-4f8f-bcf1-8cb5df438be7-config-data\") pod \"41acc8d8-39b4-4f8f-bcf1-8cb5df438be7\" (UID: \"41acc8d8-39b4-4f8f-bcf1-8cb5df438be7\") "
Jan 23 08:44:39 crc kubenswrapper[4711]: I0123 08:44:39.987477 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41acc8d8-39b4-4f8f-bcf1-8cb5df438be7-scripts\") pod \"41acc8d8-39b4-4f8f-bcf1-8cb5df438be7\" (UID: \"41acc8d8-39b4-4f8f-bcf1-8cb5df438be7\") "
Jan 23 08:44:39 crc kubenswrapper[4711]: I0123 08:44:39.994747 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41acc8d8-39b4-4f8f-bcf1-8cb5df438be7-kube-api-access-pzwjr" (OuterVolumeSpecName: "kube-api-access-pzwjr") pod "41acc8d8-39b4-4f8f-bcf1-8cb5df438be7" (UID: "41acc8d8-39b4-4f8f-bcf1-8cb5df438be7"). InnerVolumeSpecName "kube-api-access-pzwjr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.000698 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41acc8d8-39b4-4f8f-bcf1-8cb5df438be7-scripts" (OuterVolumeSpecName: "scripts") pod "41acc8d8-39b4-4f8f-bcf1-8cb5df438be7" (UID: "41acc8d8-39b4-4f8f-bcf1-8cb5df438be7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.030453 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41acc8d8-39b4-4f8f-bcf1-8cb5df438be7-config-data" (OuterVolumeSpecName: "config-data") pod "41acc8d8-39b4-4f8f-bcf1-8cb5df438be7" (UID: "41acc8d8-39b4-4f8f-bcf1-8cb5df438be7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.060548 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.060612 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.077035 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.077172 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.086910 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.086941 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.089076 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41acc8d8-39b4-4f8f-bcf1-8cb5df438be7-config-data\") on node \"crc\" DevicePath \"\""
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.089107 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41acc8d8-39b4-4f8f-bcf1-8cb5df438be7-scripts\") on node \"crc\" DevicePath \"\""
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.089123 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzwjr\" (UniqueName: \"kubernetes.io/projected/41acc8d8-39b4-4f8f-bcf1-8cb5df438be7-kube-api-access-pzwjr\") on node \"crc\" DevicePath \"\""
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.102105 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.422618 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.422906 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.433917 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.583293 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr" event={"ID":"41acc8d8-39b4-4f8f-bcf1-8cb5df438be7","Type":"ContainerDied","Data":"0c88ef53e6a5c529a335f8b737db530630dc810b96715ca24438c77fe4d2290c"}
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.583351 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0c88ef53e6a5c529a335f8b737db530630dc810b96715ca24438c77fe4d2290c"
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.583454 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr"
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.615473 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.619902 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.897262 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"]
Jan 23 08:44:40 crc kubenswrapper[4711]: I0123 08:44:40.964751 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"]
Jan 23 08:44:41 crc kubenswrapper[4711]: I0123 08:44:41.132073 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"]
Jan 23 08:44:41 crc kubenswrapper[4711]: I0123 08:44:41.142691 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="c7e3b82b-d747-491e-895f-1bbea9463e1b" containerName="nova-kuttl-api-api" probeResult="failure" output="Get \"http://10.217.0.132:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 23 08:44:41 crc kubenswrapper[4711]: I0123 08:44:41.224730 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="bc7fd5a9-ac7c-466a-8e30-b046ef1839a4" containerName="nova-kuttl-metadata-metadata" probeResult="failure" output="Get \"http://10.217.0.134:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 23 08:44:41 crc kubenswrapper[4711]: I0123 08:44:41.224738 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="c7e3b82b-d747-491e-895f-1bbea9463e1b" containerName="nova-kuttl-api-log" probeResult="failure" output="Get \"http://10.217.0.132:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 23 08:44:41 crc kubenswrapper[4711]: I0123 08:44:41.224859 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="bc7fd5a9-ac7c-466a-8e30-b046ef1839a4" containerName="nova-kuttl-metadata-log" probeResult="failure" output="Get \"http://10.217.0.134:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 23 08:44:41 crc kubenswrapper[4711]: I0123 08:44:41.589703 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="bc7fd5a9-ac7c-466a-8e30-b046ef1839a4" containerName="nova-kuttl-metadata-log" containerID="cri-o://2bbc68d46c105e750888c01e8b7e987c88b4cc753c53928063c874d47b835485" gracePeriod=30
Jan 23 08:44:41 crc kubenswrapper[4711]: I0123 08:44:41.589775 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="bc7fd5a9-ac7c-466a-8e30-b046ef1839a4" containerName="nova-kuttl-metadata-metadata" containerID="cri-o://789c6e336ec40bbc23b5e3d4246ebd6e9c8f673cf72627523fa6005d2fa8aed7" gracePeriod=30
Jan 23 08:44:41 crc kubenswrapper[4711]: I0123 08:44:41.590460 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="c7e3b82b-d747-491e-895f-1bbea9463e1b" containerName="nova-kuttl-api-log" containerID="cri-o://b6cbbb56eb21a181615bbfee71a58ca4cb01f10ddd7de3f2a3c0dad57e65dc56" gracePeriod=30
Jan 23 08:44:41 crc kubenswrapper[4711]: I0123 08:44:41.590578 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="c7e3b82b-d747-491e-895f-1bbea9463e1b" containerName="nova-kuttl-api-api" containerID="cri-o://f00c2481e10c8a512f80aee97ea1dbe4fa1de4fb809383f84543970b93ba1365" gracePeriod=30
Jan 23 08:44:42 crc kubenswrapper[4711]: I0123 08:44:42.600604 4711 generic.go:334] "Generic (PLEG): container finished" podID="bc7fd5a9-ac7c-466a-8e30-b046ef1839a4" containerID="2bbc68d46c105e750888c01e8b7e987c88b4cc753c53928063c874d47b835485" exitCode=143
Jan 23 08:44:42 crc kubenswrapper[4711]: I0123 08:44:42.600841 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podUID="8fc47765-a9d2-4df5-9f64-4d0346f86594" containerName="nova-kuttl-scheduler-scheduler" containerID="cri-o://fb479e5e28c755921e88be268a5cdb5fb88cd7971f280d082c3d1ba82319f1b7" gracePeriod=30
Jan 23 08:44:42 crc kubenswrapper[4711]: I0123 08:44:42.600953 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"bc7fd5a9-ac7c-466a-8e30-b046ef1839a4","Type":"ContainerDied","Data":"2bbc68d46c105e750888c01e8b7e987c88b4cc753c53928063c874d47b835485"}
Jan 23 08:44:43 crc kubenswrapper[4711]: I0123 08:44:43.608367 4711 generic.go:334] "Generic (PLEG): container finished" podID="c7e3b82b-d747-491e-895f-1bbea9463e1b" containerID="b6cbbb56eb21a181615bbfee71a58ca4cb01f10ddd7de3f2a3c0dad57e65dc56" exitCode=143
Jan 23 08:44:43 crc kubenswrapper[4711]: I0123 08:44:43.608661 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"c7e3b82b-d747-491e-895f-1bbea9463e1b","Type":"ContainerDied","Data":"b6cbbb56eb21a181615bbfee71a58ca4cb01f10ddd7de3f2a3c0dad57e65dc56"}
Jan 23 08:44:45 crc kubenswrapper[4711]: E0123 08:44:45.078563 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="fb479e5e28c755921e88be268a5cdb5fb88cd7971f280d082c3d1ba82319f1b7" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Jan 23 08:44:45 crc kubenswrapper[4711]: E0123 08:44:45.079816 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="fb479e5e28c755921e88be268a5cdb5fb88cd7971f280d082c3d1ba82319f1b7" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Jan 23 08:44:45 crc kubenswrapper[4711]: E0123 08:44:45.080911 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="fb479e5e28c755921e88be268a5cdb5fb88cd7971f280d082c3d1ba82319f1b7" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Jan 23 08:44:45 crc kubenswrapper[4711]: E0123 08:44:45.080952 4711 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podUID="8fc47765-a9d2-4df5-9f64-4d0346f86594" containerName="nova-kuttl-scheduler-scheduler"
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.595484 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.632892 4711 generic.go:334] "Generic (PLEG): container finished" podID="8fc47765-a9d2-4df5-9f64-4d0346f86594" containerID="fb479e5e28c755921e88be268a5cdb5fb88cd7971f280d082c3d1ba82319f1b7" exitCode=0
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.632933 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"8fc47765-a9d2-4df5-9f64-4d0346f86594","Type":"ContainerDied","Data":"fb479e5e28c755921e88be268a5cdb5fb88cd7971f280d082c3d1ba82319f1b7"}
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.632959 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"8fc47765-a9d2-4df5-9f64-4d0346f86594","Type":"ContainerDied","Data":"8b597303918db81c275a97a7eeb92a38be792d1a509b80290db66760cd13ea3e"}
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.632975 4711 scope.go:117] "RemoveContainer" containerID="fb479e5e28c755921e88be268a5cdb5fb88cd7971f280d082c3d1ba82319f1b7"
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.633095 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.665705 4711 scope.go:117] "RemoveContainer" containerID="fb479e5e28c755921e88be268a5cdb5fb88cd7971f280d082c3d1ba82319f1b7"
Jan 23 08:44:46 crc kubenswrapper[4711]: E0123 08:44:46.666206 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb479e5e28c755921e88be268a5cdb5fb88cd7971f280d082c3d1ba82319f1b7\": container with ID starting with fb479e5e28c755921e88be268a5cdb5fb88cd7971f280d082c3d1ba82319f1b7 not found: ID does not exist" containerID="fb479e5e28c755921e88be268a5cdb5fb88cd7971f280d082c3d1ba82319f1b7"
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.666250 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb479e5e28c755921e88be268a5cdb5fb88cd7971f280d082c3d1ba82319f1b7"} err="failed to get container status \"fb479e5e28c755921e88be268a5cdb5fb88cd7971f280d082c3d1ba82319f1b7\": rpc error: code = NotFound desc = could not find container \"fb479e5e28c755921e88be268a5cdb5fb88cd7971f280d082c3d1ba82319f1b7\": container with ID starting with fb479e5e28c755921e88be268a5cdb5fb88cd7971f280d082c3d1ba82319f1b7 not found: ID does not exist"
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.692176 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fc47765-a9d2-4df5-9f64-4d0346f86594-config-data\") pod \"8fc47765-a9d2-4df5-9f64-4d0346f86594\" (UID: \"8fc47765-a9d2-4df5-9f64-4d0346f86594\") "
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.692215 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vm8m\" (UniqueName: \"kubernetes.io/projected/8fc47765-a9d2-4df5-9f64-4d0346f86594-kube-api-access-5vm8m\") pod \"8fc47765-a9d2-4df5-9f64-4d0346f86594\" (UID: \"8fc47765-a9d2-4df5-9f64-4d0346f86594\") "
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.697619 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fc47765-a9d2-4df5-9f64-4d0346f86594-kube-api-access-5vm8m" (OuterVolumeSpecName: "kube-api-access-5vm8m") pod "8fc47765-a9d2-4df5-9f64-4d0346f86594" (UID: "8fc47765-a9d2-4df5-9f64-4d0346f86594"). InnerVolumeSpecName "kube-api-access-5vm8m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.713974 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fc47765-a9d2-4df5-9f64-4d0346f86594-config-data" (OuterVolumeSpecName: "config-data") pod "8fc47765-a9d2-4df5-9f64-4d0346f86594" (UID: "8fc47765-a9d2-4df5-9f64-4d0346f86594"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.794102 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fc47765-a9d2-4df5-9f64-4d0346f86594-config-data\") on node \"crc\" DevicePath \"\""
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.794133 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vm8m\" (UniqueName: \"kubernetes.io/projected/8fc47765-a9d2-4df5-9f64-4d0346f86594-kube-api-access-5vm8m\") on node \"crc\" DevicePath \"\""
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.967972 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"]
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.974040 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"]
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.995011 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"]
Jan 23 08:44:46 crc kubenswrapper[4711]: E0123 08:44:46.995655 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41acc8d8-39b4-4f8f-bcf1-8cb5df438be7" containerName="nova-manage"
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.995749 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="41acc8d8-39b4-4f8f-bcf1-8cb5df438be7" containerName="nova-manage"
Jan 23 08:44:46 crc kubenswrapper[4711]: E0123 08:44:46.995841 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fc47765-a9d2-4df5-9f64-4d0346f86594" containerName="nova-kuttl-scheduler-scheduler"
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.995937 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fc47765-a9d2-4df5-9f64-4d0346f86594" containerName="nova-kuttl-scheduler-scheduler"
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.996196 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="41acc8d8-39b4-4f8f-bcf1-8cb5df438be7" containerName="nova-manage"
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.996289 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fc47765-a9d2-4df5-9f64-4d0346f86594" containerName="nova-kuttl-scheduler-scheduler"
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.997020 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:44:46 crc kubenswrapper[4711]: I0123 08:44:46.999814 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-scheduler-config-data"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.007267 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"]
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.099384 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zns82\" (UniqueName: \"kubernetes.io/projected/3b12f593-a449-4aa0-90a3-eabdd2e57dee-kube-api-access-zns82\") pod \"nova-kuttl-scheduler-0\" (UID: \"3b12f593-a449-4aa0-90a3-eabdd2e57dee\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.099462 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b12f593-a449-4aa0-90a3-eabdd2e57dee-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"3b12f593-a449-4aa0-90a3-eabdd2e57dee\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.201384 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zns82\" (UniqueName: \"kubernetes.io/projected/3b12f593-a449-4aa0-90a3-eabdd2e57dee-kube-api-access-zns82\") pod \"nova-kuttl-scheduler-0\" (UID: \"3b12f593-a449-4aa0-90a3-eabdd2e57dee\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.201835 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b12f593-a449-4aa0-90a3-eabdd2e57dee-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"3b12f593-a449-4aa0-90a3-eabdd2e57dee\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.205812 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b12f593-a449-4aa0-90a3-eabdd2e57dee-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"3b12f593-a449-4aa0-90a3-eabdd2e57dee\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.235065 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zns82\" (UniqueName: \"kubernetes.io/projected/3b12f593-a449-4aa0-90a3-eabdd2e57dee-kube-api-access-zns82\") pod \"nova-kuttl-scheduler-0\" (UID: \"3b12f593-a449-4aa0-90a3-eabdd2e57dee\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.326793 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.491470 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.498268 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fc47765-a9d2-4df5-9f64-4d0346f86594" path="/var/lib/kubelet/pods/8fc47765-a9d2-4df5-9f64-4d0346f86594/volumes"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.607866 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6n7mf\" (UniqueName: \"kubernetes.io/projected/bc7fd5a9-ac7c-466a-8e30-b046ef1839a4-kube-api-access-6n7mf\") pod \"bc7fd5a9-ac7c-466a-8e30-b046ef1839a4\" (UID: \"bc7fd5a9-ac7c-466a-8e30-b046ef1839a4\") "
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.608020 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc7fd5a9-ac7c-466a-8e30-b046ef1839a4-logs\") pod \"bc7fd5a9-ac7c-466a-8e30-b046ef1839a4\" (UID: \"bc7fd5a9-ac7c-466a-8e30-b046ef1839a4\") "
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.608129 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc7fd5a9-ac7c-466a-8e30-b046ef1839a4-config-data\") pod \"bc7fd5a9-ac7c-466a-8e30-b046ef1839a4\" (UID: \"bc7fd5a9-ac7c-466a-8e30-b046ef1839a4\") "
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.608707 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc7fd5a9-ac7c-466a-8e30-b046ef1839a4-logs" (OuterVolumeSpecName: "logs") pod "bc7fd5a9-ac7c-466a-8e30-b046ef1839a4" (UID: "bc7fd5a9-ac7c-466a-8e30-b046ef1839a4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.618405 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc7fd5a9-ac7c-466a-8e30-b046ef1839a4-kube-api-access-6n7mf" (OuterVolumeSpecName: "kube-api-access-6n7mf") pod "bc7fd5a9-ac7c-466a-8e30-b046ef1839a4" (UID: "bc7fd5a9-ac7c-466a-8e30-b046ef1839a4"). InnerVolumeSpecName "kube-api-access-6n7mf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.627640 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc7fd5a9-ac7c-466a-8e30-b046ef1839a4-config-data" (OuterVolumeSpecName: "config-data") pod "bc7fd5a9-ac7c-466a-8e30-b046ef1839a4" (UID: "bc7fd5a9-ac7c-466a-8e30-b046ef1839a4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.644292 4711 generic.go:334] "Generic (PLEG): container finished" podID="9e6e254a-f600-44ae-8925-6ad10a1220e5" containerID="3542ccc41defdb0e51d72c252143be1bc5895c650d6374de75785663334df6c9" exitCode=0
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.644369 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2" event={"ID":"9e6e254a-f600-44ae-8925-6ad10a1220e5","Type":"ContainerDied","Data":"3542ccc41defdb0e51d72c252143be1bc5895c650d6374de75785663334df6c9"}
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.647812 4711 generic.go:334] "Generic (PLEG): container finished" podID="bc7fd5a9-ac7c-466a-8e30-b046ef1839a4" containerID="789c6e336ec40bbc23b5e3d4246ebd6e9c8f673cf72627523fa6005d2fa8aed7" exitCode=0
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.647845 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"bc7fd5a9-ac7c-466a-8e30-b046ef1839a4","Type":"ContainerDied","Data":"789c6e336ec40bbc23b5e3d4246ebd6e9c8f673cf72627523fa6005d2fa8aed7"}
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.647871 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"bc7fd5a9-ac7c-466a-8e30-b046ef1839a4","Type":"ContainerDied","Data":"4225ebe0857970ee2226aa2164081074e38bed4bcd55ede0cd2fd8e2000fb11c"}
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.647884 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.647888 4711 scope.go:117] "RemoveContainer" containerID="789c6e336ec40bbc23b5e3d4246ebd6e9c8f673cf72627523fa6005d2fa8aed7"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.687573 4711 scope.go:117] "RemoveContainer" containerID="2bbc68d46c105e750888c01e8b7e987c88b4cc753c53928063c874d47b835485"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.696113 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"]
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.711345 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6n7mf\" (UniqueName: \"kubernetes.io/projected/bc7fd5a9-ac7c-466a-8e30-b046ef1839a4-kube-api-access-6n7mf\") on node \"crc\" DevicePath \"\""
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.711383 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc7fd5a9-ac7c-466a-8e30-b046ef1839a4-logs\") on node \"crc\" DevicePath \"\""
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.711395 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc7fd5a9-ac7c-466a-8e30-b046ef1839a4-config-data\") on node \"crc\" DevicePath \"\""
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.713624 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"]
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.719673 4711 scope.go:117] "RemoveContainer" containerID="789c6e336ec40bbc23b5e3d4246ebd6e9c8f673cf72627523fa6005d2fa8aed7"
Jan 23 08:44:47 crc kubenswrapper[4711]: E0123 08:44:47.720426 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"789c6e336ec40bbc23b5e3d4246ebd6e9c8f673cf72627523fa6005d2fa8aed7\": container with ID starting with 789c6e336ec40bbc23b5e3d4246ebd6e9c8f673cf72627523fa6005d2fa8aed7 not found: ID does not exist" containerID="789c6e336ec40bbc23b5e3d4246ebd6e9c8f673cf72627523fa6005d2fa8aed7"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.720463 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"789c6e336ec40bbc23b5e3d4246ebd6e9c8f673cf72627523fa6005d2fa8aed7"} err="failed to get container status \"789c6e336ec40bbc23b5e3d4246ebd6e9c8f673cf72627523fa6005d2fa8aed7\": rpc error: code = NotFound desc = could not find container \"789c6e336ec40bbc23b5e3d4246ebd6e9c8f673cf72627523fa6005d2fa8aed7\": container with ID starting with 789c6e336ec40bbc23b5e3d4246ebd6e9c8f673cf72627523fa6005d2fa8aed7 not found: ID does not exist"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.720487 4711 scope.go:117] "RemoveContainer" containerID="2bbc68d46c105e750888c01e8b7e987c88b4cc753c53928063c874d47b835485"
Jan 23 08:44:47 crc kubenswrapper[4711]: E0123 08:44:47.721600 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bbc68d46c105e750888c01e8b7e987c88b4cc753c53928063c874d47b835485\": container with ID starting with 2bbc68d46c105e750888c01e8b7e987c88b4cc753c53928063c874d47b835485 not found: ID does not exist" containerID="2bbc68d46c105e750888c01e8b7e987c88b4cc753c53928063c874d47b835485"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.721630 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bbc68d46c105e750888c01e8b7e987c88b4cc753c53928063c874d47b835485"} err="failed to get container status \"2bbc68d46c105e750888c01e8b7e987c88b4cc753c53928063c874d47b835485\": rpc error: code = NotFound desc = could not find container \"2bbc68d46c105e750888c01e8b7e987c88b4cc753c53928063c874d47b835485\": container with ID starting with 2bbc68d46c105e750888c01e8b7e987c88b4cc753c53928063c874d47b835485 not found: ID does not exist"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.722378 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"]
Jan 23 08:44:47 crc kubenswrapper[4711]: E0123 08:44:47.723133 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc7fd5a9-ac7c-466a-8e30-b046ef1839a4" containerName="nova-kuttl-metadata-log"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.723161 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc7fd5a9-ac7c-466a-8e30-b046ef1839a4" containerName="nova-kuttl-metadata-log"
Jan 23 08:44:47 crc kubenswrapper[4711]: E0123 08:44:47.723190 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc7fd5a9-ac7c-466a-8e30-b046ef1839a4" containerName="nova-kuttl-metadata-metadata"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.723199 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc7fd5a9-ac7c-466a-8e30-b046ef1839a4" containerName="nova-kuttl-metadata-metadata"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.723388 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc7fd5a9-ac7c-466a-8e30-b046ef1839a4" containerName="nova-kuttl-metadata-metadata"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.723464 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc7fd5a9-ac7c-466a-8e30-b046ef1839a4" containerName="nova-kuttl-metadata-log"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.724445 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.724445 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.733426 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"]
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.742269 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-metadata-config-data"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.798601 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"]
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.914724 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d782917-e4f6-44e9-baa7-402c86c6013e-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"2d782917-e4f6-44e9-baa7-402c86c6013e\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.914812 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhbd6\" (UniqueName: \"kubernetes.io/projected/2d782917-e4f6-44e9-baa7-402c86c6013e-kube-api-access-vhbd6\") pod \"nova-kuttl-metadata-0\" (UID: \"2d782917-e4f6-44e9-baa7-402c86c6013e\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:47 crc kubenswrapper[4711]: I0123 08:44:47.914845 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d782917-e4f6-44e9-baa7-402c86c6013e-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"2d782917-e4f6-44e9-baa7-402c86c6013e\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.016544 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d782917-e4f6-44e9-baa7-402c86c6013e-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"2d782917-e4f6-44e9-baa7-402c86c6013e\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.016623 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhbd6\" (UniqueName: \"kubernetes.io/projected/2d782917-e4f6-44e9-baa7-402c86c6013e-kube-api-access-vhbd6\") pod \"nova-kuttl-metadata-0\" (UID: \"2d782917-e4f6-44e9-baa7-402c86c6013e\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.016655 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d782917-e4f6-44e9-baa7-402c86c6013e-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"2d782917-e4f6-44e9-baa7-402c86c6013e\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.016952 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d782917-e4f6-44e9-baa7-402c86c6013e-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"2d782917-e4f6-44e9-baa7-402c86c6013e\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.022654 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d782917-e4f6-44e9-baa7-402c86c6013e-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"2d782917-e4f6-44e9-baa7-402c86c6013e\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.034758 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhbd6\" (UniqueName: \"kubernetes.io/projected/2d782917-e4f6-44e9-baa7-402c86c6013e-kube-api-access-vhbd6\") pod \"nova-kuttl-metadata-0\" (UID: \"2d782917-e4f6-44e9-baa7-402c86c6013e\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.062836 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.488859 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"]
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.577359 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.665458 4711 generic.go:334] "Generic (PLEG): container finished" podID="c7e3b82b-d747-491e-895f-1bbea9463e1b" containerID="f00c2481e10c8a512f80aee97ea1dbe4fa1de4fb809383f84543970b93ba1365" exitCode=0
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.665602 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.665597 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"c7e3b82b-d747-491e-895f-1bbea9463e1b","Type":"ContainerDied","Data":"f00c2481e10c8a512f80aee97ea1dbe4fa1de4fb809383f84543970b93ba1365"}
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.668027 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"c7e3b82b-d747-491e-895f-1bbea9463e1b","Type":"ContainerDied","Data":"88322d7a3bd2997df475694967599722255c465969b12a56093da1b32061aa17"}
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.668068 4711 scope.go:117] "RemoveContainer" containerID="f00c2481e10c8a512f80aee97ea1dbe4fa1de4fb809383f84543970b93ba1365"
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.678762 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"3b12f593-a449-4aa0-90a3-eabdd2e57dee","Type":"ContainerStarted","Data":"caf510aa61b34fdecfc8d38685fcb2c1937ac21df5902ad1b0dd70a671bee7d7"}
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.678898 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"3b12f593-a449-4aa0-90a3-eabdd2e57dee","Type":"ContainerStarted","Data":"dd6009abd4e08a18e2f9f93e297199c14474e1afe6aa15c677fd37cdcb77080f"}
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.679789 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"2d782917-e4f6-44e9-baa7-402c86c6013e","Type":"ContainerStarted","Data":"10b078f97b3c4eb4e5fe1f01c25ad4d1b242fc79b78321443f41e473afa45807"}
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.687620 4711 scope.go:117] "RemoveContainer" containerID="b6cbbb56eb21a181615bbfee71a58ca4cb01f10ddd7de3f2a3c0dad57e65dc56"
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.700243 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podStartSLOduration=2.7002097259999998 podStartE2EDuration="2.700209726s" podCreationTimestamp="2026-01-23 08:44:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:44:48.694641441 +0000 UTC m=+1474.267597859" watchObservedRunningTime="2026-01-23 08:44:48.700209726 +0000 UTC m=+1474.273166084"
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.714482 4711 scope.go:117] "RemoveContainer" containerID="f00c2481e10c8a512f80aee97ea1dbe4fa1de4fb809383f84543970b93ba1365"
Jan 23 08:44:48 crc kubenswrapper[4711]: E0123 08:44:48.715111 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f00c2481e10c8a512f80aee97ea1dbe4fa1de4fb809383f84543970b93ba1365\": container with ID starting with f00c2481e10c8a512f80aee97ea1dbe4fa1de4fb809383f84543970b93ba1365 not found: ID does not exist" containerID="f00c2481e10c8a512f80aee97ea1dbe4fa1de4fb809383f84543970b93ba1365"
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.715198 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f00c2481e10c8a512f80aee97ea1dbe4fa1de4fb809383f84543970b93ba1365"} err="failed to get container status \"f00c2481e10c8a512f80aee97ea1dbe4fa1de4fb809383f84543970b93ba1365\": rpc error: code = NotFound desc = could not find container \"f00c2481e10c8a512f80aee97ea1dbe4fa1de4fb809383f84543970b93ba1365\": container with ID starting with f00c2481e10c8a512f80aee97ea1dbe4fa1de4fb809383f84543970b93ba1365 not found: ID does not exist"
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.715261 4711 scope.go:117] "RemoveContainer" containerID="b6cbbb56eb21a181615bbfee71a58ca4cb01f10ddd7de3f2a3c0dad57e65dc56"
Jan 23 08:44:48 crc kubenswrapper[4711]: E0123 08:44:48.715848 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6cbbb56eb21a181615bbfee71a58ca4cb01f10ddd7de3f2a3c0dad57e65dc56\": container with ID starting with b6cbbb56eb21a181615bbfee71a58ca4cb01f10ddd7de3f2a3c0dad57e65dc56 not found: ID does not exist" containerID="b6cbbb56eb21a181615bbfee71a58ca4cb01f10ddd7de3f2a3c0dad57e65dc56"
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.715890 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6cbbb56eb21a181615bbfee71a58ca4cb01f10ddd7de3f2a3c0dad57e65dc56"} err="failed to get container status \"b6cbbb56eb21a181615bbfee71a58ca4cb01f10ddd7de3f2a3c0dad57e65dc56\": rpc error: code = NotFound desc = could not find container \"b6cbbb56eb21a181615bbfee71a58ca4cb01f10ddd7de3f2a3c0dad57e65dc56\": container with ID starting with b6cbbb56eb21a181615bbfee71a58ca4cb01f10ddd7de3f2a3c0dad57e65dc56 not found: ID does not exist"
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.728770 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7e3b82b-d747-491e-895f-1bbea9463e1b-config-data\") pod \"c7e3b82b-d747-491e-895f-1bbea9463e1b\" (UID: \"c7e3b82b-d747-491e-895f-1bbea9463e1b\") "
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.728908 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7e3b82b-d747-491e-895f-1bbea9463e1b-logs\") pod \"c7e3b82b-d747-491e-895f-1bbea9463e1b\" (UID: \"c7e3b82b-d747-491e-895f-1bbea9463e1b\") "
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.728933 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-clw7g\" (UniqueName: \"kubernetes.io/projected/c7e3b82b-d747-491e-895f-1bbea9463e1b-kube-api-access-clw7g\") pod \"c7e3b82b-d747-491e-895f-1bbea9463e1b\" (UID: \"c7e3b82b-d747-491e-895f-1bbea9463e1b\") "
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.729677 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7e3b82b-d747-491e-895f-1bbea9463e1b-logs" (OuterVolumeSpecName: "logs") pod "c7e3b82b-d747-491e-895f-1bbea9463e1b" (UID: "c7e3b82b-d747-491e-895f-1bbea9463e1b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.735556 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7e3b82b-d747-491e-895f-1bbea9463e1b-kube-api-access-clw7g" (OuterVolumeSpecName: "kube-api-access-clw7g") pod "c7e3b82b-d747-491e-895f-1bbea9463e1b" (UID: "c7e3b82b-d747-491e-895f-1bbea9463e1b"). InnerVolumeSpecName "kube-api-access-clw7g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.756137 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7e3b82b-d747-491e-895f-1bbea9463e1b-config-data" (OuterVolumeSpecName: "config-data") pod "c7e3b82b-d747-491e-895f-1bbea9463e1b" (UID: "c7e3b82b-d747-491e-895f-1bbea9463e1b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.831541 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7e3b82b-d747-491e-895f-1bbea9463e1b-logs\") on node \"crc\" DevicePath \"\""
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.831571 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-clw7g\" (UniqueName: \"kubernetes.io/projected/c7e3b82b-d747-491e-895f-1bbea9463e1b-kube-api-access-clw7g\") on node \"crc\" DevicePath \"\""
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.831588 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7e3b82b-d747-491e-895f-1bbea9463e1b-config-data\") on node \"crc\" DevicePath \"\""
Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.946916 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.001779 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"]
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.016950 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"]
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.028513 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"]
Jan 23 08:44:49 crc kubenswrapper[4711]: E0123 08:44:49.028970 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7e3b82b-d747-491e-895f-1bbea9463e1b" containerName="nova-kuttl-api-api"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.030055 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7e3b82b-d747-491e-895f-1bbea9463e1b" containerName="nova-kuttl-api-api"
Jan 23 08:44:49 crc kubenswrapper[4711]: E0123 08:44:49.030208 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e6e254a-f600-44ae-8925-6ad10a1220e5" containerName="nova-kuttl-cell1-conductor-db-sync"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.030221 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e6e254a-f600-44ae-8925-6ad10a1220e5" containerName="nova-kuttl-cell1-conductor-db-sync"
Jan 23 08:44:49 crc kubenswrapper[4711]: E0123 08:44:49.030240 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7e3b82b-d747-491e-895f-1bbea9463e1b" containerName="nova-kuttl-api-log"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.030249 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7e3b82b-d747-491e-895f-1bbea9463e1b" containerName="nova-kuttl-api-log"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.030710 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e6e254a-f600-44ae-8925-6ad10a1220e5" containerName="nova-kuttl-cell1-conductor-db-sync"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.030734 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7e3b82b-d747-491e-895f-1bbea9463e1b" containerName="nova-kuttl-api-api"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.030759 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7e3b82b-d747-491e-895f-1bbea9463e1b" containerName="nova-kuttl-api-log"
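
Every record above carries a klog header: severity letter (I/W/E/F), MMDD date, wall-clock time with microseconds, PID, and source file:line. The E-severity lines in the NotFound exchange come from the same kubelet process as the I lines around them. A small stdlib-only sketch that splits that header out of a journal line shaped like the ones here (the regexp is keyed to this log's format, not a general klog parser):

package main

import (
	"fmt"
	"regexp"
)

// Matches headers like "I0123 08:44:48.700243 4711 pod_startup_latency_tracker.go:104]".
var klogHdr = regexp.MustCompile(`([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d{6})\s+(\d+) ([\w.]+:\d+)\]`)

func main() {
	line := `Jan 23 08:44:48 crc kubenswrapper[4711]: I0123 08:44:48.700243 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration"`
	if m := klogHdr.FindStringSubmatch(line); m != nil {
		fmt.Printf("severity=%s date=%s time=%s pid=%s src=%s\n", m[1], m[2], m[3], m[4], m[5])
	}
}
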
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.031894 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.034665 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-api-config-data"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.035624 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8v2v\" (UniqueName: \"kubernetes.io/projected/9e6e254a-f600-44ae-8925-6ad10a1220e5-kube-api-access-w8v2v\") pod \"9e6e254a-f600-44ae-8925-6ad10a1220e5\" (UID: \"9e6e254a-f600-44ae-8925-6ad10a1220e5\") "
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.035662 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e6e254a-f600-44ae-8925-6ad10a1220e5-config-data\") pod \"9e6e254a-f600-44ae-8925-6ad10a1220e5\" (UID: \"9e6e254a-f600-44ae-8925-6ad10a1220e5\") "
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.035769 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e6e254a-f600-44ae-8925-6ad10a1220e5-scripts\") pod \"9e6e254a-f600-44ae-8925-6ad10a1220e5\" (UID: \"9e6e254a-f600-44ae-8925-6ad10a1220e5\") "
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.038573 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"]
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.041044 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e6e254a-f600-44ae-8925-6ad10a1220e5-scripts" (OuterVolumeSpecName: "scripts") pod "9e6e254a-f600-44ae-8925-6ad10a1220e5" (UID: "9e6e254a-f600-44ae-8925-6ad10a1220e5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.041719 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e6e254a-f600-44ae-8925-6ad10a1220e5-kube-api-access-w8v2v" (OuterVolumeSpecName: "kube-api-access-w8v2v") pod "9e6e254a-f600-44ae-8925-6ad10a1220e5" (UID: "9e6e254a-f600-44ae-8925-6ad10a1220e5"). InnerVolumeSpecName "kube-api-access-w8v2v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.071273 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e6e254a-f600-44ae-8925-6ad10a1220e5-config-data" (OuterVolumeSpecName: "config-data") pod "9e6e254a-f600-44ae-8925-6ad10a1220e5" (UID: "9e6e254a-f600-44ae-8925-6ad10a1220e5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.139336 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3575122-fbcd-4969-a48e-7fb5660c20af-config-data\") pod \"nova-kuttl-api-0\" (UID: \"e3575122-fbcd-4969-a48e-7fb5660c20af\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.139390 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85qrr\" (UniqueName: \"kubernetes.io/projected/e3575122-fbcd-4969-a48e-7fb5660c20af-kube-api-access-85qrr\") pod \"nova-kuttl-api-0\" (UID: \"e3575122-fbcd-4969-a48e-7fb5660c20af\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.139442 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3575122-fbcd-4969-a48e-7fb5660c20af-logs\") pod \"nova-kuttl-api-0\" (UID: \"e3575122-fbcd-4969-a48e-7fb5660c20af\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.139544 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e6e254a-f600-44ae-8925-6ad10a1220e5-scripts\") on node \"crc\" DevicePath \"\""
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.139561 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8v2v\" (UniqueName: \"kubernetes.io/projected/9e6e254a-f600-44ae-8925-6ad10a1220e5-kube-api-access-w8v2v\") on node \"crc\" DevicePath \"\""
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.139572 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e6e254a-f600-44ae-8925-6ad10a1220e5-config-data\") on node \"crc\" DevicePath \"\""
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.241007 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3575122-fbcd-4969-a48e-7fb5660c20af-logs\") pod \"nova-kuttl-api-0\" (UID: \"e3575122-fbcd-4969-a48e-7fb5660c20af\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.241134 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3575122-fbcd-4969-a48e-7fb5660c20af-config-data\") pod \"nova-kuttl-api-0\" (UID: \"e3575122-fbcd-4969-a48e-7fb5660c20af\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.241155 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85qrr\" (UniqueName: \"kubernetes.io/projected/e3575122-fbcd-4969-a48e-7fb5660c20af-kube-api-access-85qrr\") pod \"nova-kuttl-api-0\" (UID: \"e3575122-fbcd-4969-a48e-7fb5660c20af\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.241745 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3575122-fbcd-4969-a48e-7fb5660c20af-logs\") pod \"nova-kuttl-api-0\" (UID: \"e3575122-fbcd-4969-a48e-7fb5660c20af\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.245295 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3575122-fbcd-4969-a48e-7fb5660c20af-config-data\") pod \"nova-kuttl-api-0\" (UID: \"e3575122-fbcd-4969-a48e-7fb5660c20af\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.263922 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85qrr\" (UniqueName: \"kubernetes.io/projected/e3575122-fbcd-4969-a48e-7fb5660c20af-kube-api-access-85qrr\") pod \"nova-kuttl-api-0\" (UID: \"e3575122-fbcd-4969-a48e-7fb5660c20af\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.355561 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.487087 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc7fd5a9-ac7c-466a-8e30-b046ef1839a4" path="/var/lib/kubelet/pods/bc7fd5a9-ac7c-466a-8e30-b046ef1839a4/volumes"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.487848 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7e3b82b-d747-491e-895f-1bbea9463e1b" path="/var/lib/kubelet/pods/c7e3b82b-d747-491e-895f-1bbea9463e1b/volumes"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.702704 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"2d782917-e4f6-44e9-baa7-402c86c6013e","Type":"ContainerStarted","Data":"047d2aec931c5ecd051c9459dd06adf45a67679f9cc9e1f2915f8658e1d8e27a"}
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.702990 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"2d782917-e4f6-44e9-baa7-402c86c6013e","Type":"ContainerStarted","Data":"88aeccc962b3004b95e9da36d4625f508137ad70b885726c584ab925a1bceff1"}
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.705400 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2" event={"ID":"9e6e254a-f600-44ae-8925-6ad10a1220e5","Type":"ContainerDied","Data":"1172873c3404f589021becfd199c9b61807e0680765cfa2f41a5d3c09f030d14"}
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.705430 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1172873c3404f589021becfd199c9b61807e0680765cfa2f41a5d3c09f030d14"
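
The two kubelet_volumes.go lines above show the kubelet garbage-collecting per-pod volume directories under /var/lib/kubelet/pods/<podUID>/volumes once the old metadata and api pods are gone. A sketch that lists any leftover directories of that shape; it assumes shell access on the node (e.g. inside the CRC VM) with read permission on /var/lib/kubelet, and the path layout is taken from the log:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	root := "/var/lib/kubelet/pods"
	pods, err := os.ReadDir(root)
	if err != nil {
		fmt.Println("cannot read", root, "-", err)
		return
	}
	for _, p := range pods {
		// Each entry is a pod UID; a non-empty volumes/ dir means plugin
		// directories are still mounted or awaiting cleanup.
		vols := filepath.Join(root, p.Name(), "volumes")
		if entries, err := os.ReadDir(vols); err == nil && len(entries) > 0 {
			fmt.Printf("pod %s still has %d volume plugin dirs\n", p.Name(), len(entries))
		}
	}
}
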
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.705490 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.726025 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-metadata-0" podStartSLOduration=2.726003966 podStartE2EDuration="2.726003966s" podCreationTimestamp="2026-01-23 08:44:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:44:49.721791644 +0000 UTC m=+1475.294748012" watchObservedRunningTime="2026-01-23 08:44:49.726003966 +0000 UTC m=+1475.298960334"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.753994 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"]
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.756355 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.760322 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-conductor-config-data"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.769613 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"]
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.805842 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"]
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.878567 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r94dp\" (UniqueName: \"kubernetes.io/projected/92ef4f76-c65c-4b02-8cf0-574d68712a48-kube-api-access-r94dp\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"92ef4f76-c65c-4b02-8cf0-574d68712a48\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.878660 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92ef4f76-c65c-4b02-8cf0-574d68712a48-config-data\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"92ef4f76-c65c-4b02-8cf0-574d68712a48\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.980114 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r94dp\" (UniqueName: \"kubernetes.io/projected/92ef4f76-c65c-4b02-8cf0-574d68712a48-kube-api-access-r94dp\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"92ef4f76-c65c-4b02-8cf0-574d68712a48\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.980383 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92ef4f76-c65c-4b02-8cf0-574d68712a48-config-data\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"92ef4f76-c65c-4b02-8cf0-574d68712a48\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.992969 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92ef4f76-c65c-4b02-8cf0-574d68712a48-config-data\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"92ef4f76-c65c-4b02-8cf0-574d68712a48\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0"
Jan 23 08:44:49 crc kubenswrapper[4711]: I0123 08:44:49.995090 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r94dp\" (UniqueName: \"kubernetes.io/projected/92ef4f76-c65c-4b02-8cf0-574d68712a48-kube-api-access-r94dp\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"92ef4f76-c65c-4b02-8cf0-574d68712a48\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0"
Jan 23 08:44:50 crc kubenswrapper[4711]: I0123 08:45:50.078111 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0"
Jan 23 08:44:50 crc kubenswrapper[4711]: I0123 08:44:50.503942 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"]
Jan 23 08:44:50 crc kubenswrapper[4711]: W0123 08:44:50.505690 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod92ef4f76_c65c_4b02_8cf0_574d68712a48.slice/crio-97e68bd6e9a0457e313d09930b8cdf4bfa7e711dbaab355751d42de27f07dbe5 WatchSource:0}: Error finding container 97e68bd6e9a0457e313d09930b8cdf4bfa7e711dbaab355751d42de27f07dbe5: Status 404 returned error can't find the container with id 97e68bd6e9a0457e313d09930b8cdf4bfa7e711dbaab355751d42de27f07dbe5
Jan 23 08:44:50 crc kubenswrapper[4711]: I0123 08:44:50.721410 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" event={"ID":"92ef4f76-c65c-4b02-8cf0-574d68712a48","Type":"ContainerStarted","Data":"957b3eac16d7706343ab805453158385e1efb937792970bb856578b6bd429146"}
Jan 23 08:44:50 crc kubenswrapper[4711]: I0123 08:44:50.722650 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0"
Jan 23 08:44:50 crc kubenswrapper[4711]: I0123 08:44:50.722691 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" event={"ID":"92ef4f76-c65c-4b02-8cf0-574d68712a48","Type":"ContainerStarted","Data":"97e68bd6e9a0457e313d09930b8cdf4bfa7e711dbaab355751d42de27f07dbe5"}
Jan 23 08:44:50 crc kubenswrapper[4711]: I0123 08:44:50.723620 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"e3575122-fbcd-4969-a48e-7fb5660c20af","Type":"ContainerStarted","Data":"d7622962c7fb29d8579cf8fdc8b6320892bc93a11aa583e22b2c86261a1d249d"}
Jan 23 08:44:50 crc kubenswrapper[4711]: I0123 08:44:50.723663 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"e3575122-fbcd-4969-a48e-7fb5660c20af","Type":"ContainerStarted","Data":"2d81721bf3cc308deff0de20f3e3feee41904c3a616918043a7bf1dec05ffcfa"}
Jan 23 08:44:50 crc kubenswrapper[4711]: I0123 08:44:50.723676 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"e3575122-fbcd-4969-a48e-7fb5660c20af","Type":"ContainerStarted","Data":"6bc01566fba1427fa5b2a704ef7a1f9d26b01f798629bb6c38c6e277e7c8fa0d"}
Jan 23 08:44:50 crc kubenswrapper[4711]: I0123 08:44:50.742366 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" podStartSLOduration=1.7423488470000001 podStartE2EDuration="1.742348847s" podCreationTimestamp="2026-01-23 08:44:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:44:50.740655986 +0000 UTC m=+1476.313612364" watchObservedRunningTime="2026-01-23 08:44:50.742348847 +0000 UTC m=+1476.315305215"
Jan 23 08:44:50 crc kubenswrapper[4711]: I0123 08:44:50.762438 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-api-0" podStartSLOduration=1.762417226 podStartE2EDuration="1.762417226s" podCreationTimestamp="2026-01-23 08:44:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:44:50.759142246 +0000 UTC m=+1476.332098614" watchObservedRunningTime="2026-01-23 08:44:50.762417226 +0000 UTC m=+1476.335373594"
Jan 23 08:44:52 crc kubenswrapper[4711]: I0123 08:44:52.328235 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:44:53 crc kubenswrapper[4711]: I0123 08:44:53.063980 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:53 crc kubenswrapper[4711]: I0123 08:44:53.064034 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:44:55 crc kubenswrapper[4711]: I0123 08:44:55.105982 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0"
Jan 23 08:44:55 crc kubenswrapper[4711]: I0123 08:44:55.561728 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6"]
Jan 23 08:44:55 crc kubenswrapper[4711]: I0123 08:44:55.562925 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6"
Jan 23 08:44:55 crc kubenswrapper[4711]: I0123 08:44:55.565145 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-manage-config-data"
Jan 23 08:44:55 crc kubenswrapper[4711]: I0123 08:44:55.566183 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-manage-scripts"
Jan 23 08:44:55 crc kubenswrapper[4711]: I0123 08:44:55.570292 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6"]
Jan 23 08:44:55 crc kubenswrapper[4711]: I0123 08:44:55.667457 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f49a860-f013-4823-864f-0e09c4d1211c-scripts\") pod \"nova-kuttl-cell1-cell-mapping-nr2b6\" (UID: \"7f49a860-f013-4823-864f-0e09c4d1211c\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6"
Jan 23 08:44:55 crc kubenswrapper[4711]: I0123 08:44:55.667561 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f49a860-f013-4823-864f-0e09c4d1211c-config-data\") pod \"nova-kuttl-cell1-cell-mapping-nr2b6\" (UID: \"7f49a860-f013-4823-864f-0e09c4d1211c\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6"
\"nova-kuttl-cell1-cell-mapping-nr2b6\" (UID: \"7f49a860-f013-4823-864f-0e09c4d1211c\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6" Jan 23 08:44:55 crc kubenswrapper[4711]: I0123 08:44:55.768844 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f49a860-f013-4823-864f-0e09c4d1211c-scripts\") pod \"nova-kuttl-cell1-cell-mapping-nr2b6\" (UID: \"7f49a860-f013-4823-864f-0e09c4d1211c\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6" Jan 23 08:44:55 crc kubenswrapper[4711]: I0123 08:44:55.769112 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f49a860-f013-4823-864f-0e09c4d1211c-config-data\") pod \"nova-kuttl-cell1-cell-mapping-nr2b6\" (UID: \"7f49a860-f013-4823-864f-0e09c4d1211c\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6" Jan 23 08:44:55 crc kubenswrapper[4711]: I0123 08:44:55.769185 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwsjp\" (UniqueName: \"kubernetes.io/projected/7f49a860-f013-4823-864f-0e09c4d1211c-kube-api-access-wwsjp\") pod \"nova-kuttl-cell1-cell-mapping-nr2b6\" (UID: \"7f49a860-f013-4823-864f-0e09c4d1211c\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6" Jan 23 08:44:55 crc kubenswrapper[4711]: I0123 08:44:55.782930 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f49a860-f013-4823-864f-0e09c4d1211c-scripts\") pod \"nova-kuttl-cell1-cell-mapping-nr2b6\" (UID: \"7f49a860-f013-4823-864f-0e09c4d1211c\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6" Jan 23 08:44:55 crc kubenswrapper[4711]: I0123 08:44:55.783765 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f49a860-f013-4823-864f-0e09c4d1211c-config-data\") pod \"nova-kuttl-cell1-cell-mapping-nr2b6\" (UID: \"7f49a860-f013-4823-864f-0e09c4d1211c\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6" Jan 23 08:44:55 crc kubenswrapper[4711]: I0123 08:44:55.785832 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwsjp\" (UniqueName: \"kubernetes.io/projected/7f49a860-f013-4823-864f-0e09c4d1211c-kube-api-access-wwsjp\") pod \"nova-kuttl-cell1-cell-mapping-nr2b6\" (UID: \"7f49a860-f013-4823-864f-0e09c4d1211c\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6" Jan 23 08:44:55 crc kubenswrapper[4711]: I0123 08:44:55.890313 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6" Jan 23 08:44:55 crc kubenswrapper[4711]: I0123 08:44:55.993961 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:44:55 crc kubenswrapper[4711]: I0123 08:44:55.994282 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:44:55 crc kubenswrapper[4711]: I0123 08:44:55.994334 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:44:55 crc kubenswrapper[4711]: I0123 08:44:55.995090 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"509b113ae3fd960091847020bbc2a0f41a3fb8b6e06cdd9e7afa31b3382efa17"} pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 23 08:44:55 crc kubenswrapper[4711]: I0123 08:44:55.995151 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" containerID="cri-o://509b113ae3fd960091847020bbc2a0f41a3fb8b6e06cdd9e7afa31b3382efa17" gracePeriod=600 Jan 23 08:44:56 crc kubenswrapper[4711]: I0123 08:44:56.305218 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6"] Jan 23 08:44:56 crc kubenswrapper[4711]: W0123 08:44:56.311144 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f49a860_f013_4823_864f_0e09c4d1211c.slice/crio-7daaef35efdeecac6f756234a6a6b5f6e562cc98668aae2f8ea3b308e58de9c2 WatchSource:0}: Error finding container 7daaef35efdeecac6f756234a6a6b5f6e562cc98668aae2f8ea3b308e58de9c2: Status 404 returned error can't find the container with id 7daaef35efdeecac6f756234a6a6b5f6e562cc98668aae2f8ea3b308e58de9c2 Jan 23 08:44:56 crc kubenswrapper[4711]: I0123 08:44:56.773927 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6" event={"ID":"7f49a860-f013-4823-864f-0e09c4d1211c","Type":"ContainerStarted","Data":"7daaef35efdeecac6f756234a6a6b5f6e562cc98668aae2f8ea3b308e58de9c2"} Jan 23 08:44:58 crc kubenswrapper[4711]: I0123 08:44:57.328379 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:44:58 crc kubenswrapper[4711]: I0123 08:44:57.358770 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:44:58 crc kubenswrapper[4711]: I0123 08:44:57.784118 4711 generic.go:334] "Generic (PLEG): container finished" podID="3846d4e0-cfda-4e0b-8747-85267de12736" containerID="509b113ae3fd960091847020bbc2a0f41a3fb8b6e06cdd9e7afa31b3382efa17" exitCode=0 Jan 23 
08:44:58 crc kubenswrapper[4711]: I0123 08:44:57.784181 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerDied","Data":"509b113ae3fd960091847020bbc2a0f41a3fb8b6e06cdd9e7afa31b3382efa17"} Jan 23 08:44:58 crc kubenswrapper[4711]: I0123 08:44:57.784256 4711 scope.go:117] "RemoveContainer" containerID="9ef6df8407452842a81bff9ff371dec1b0be0a97894fe9cf1da32e295f2f3558" Jan 23 08:44:58 crc kubenswrapper[4711]: I0123 08:44:57.807329 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:44:58 crc kubenswrapper[4711]: I0123 08:44:58.064019 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:44:58 crc kubenswrapper[4711]: I0123 08:44:58.064080 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:44:58 crc kubenswrapper[4711]: I0123 08:44:58.793809 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6" event={"ID":"7f49a860-f013-4823-864f-0e09c4d1211c","Type":"ContainerStarted","Data":"ed7b60ce9b29bee0a8312b2bc86772c7b05819903c8a4a5250a2ea2367f849d7"} Jan 23 08:44:59 crc kubenswrapper[4711]: I0123 08:44:59.146798 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="2d782917-e4f6-44e9-baa7-402c86c6013e" containerName="nova-kuttl-metadata-metadata" probeResult="failure" output="Get \"http://10.217.0.138:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:44:59 crc kubenswrapper[4711]: I0123 08:44:59.146790 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="2d782917-e4f6-44e9-baa7-402c86c6013e" containerName="nova-kuttl-metadata-log" probeResult="failure" output="Get \"http://10.217.0.138:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:44:59 crc kubenswrapper[4711]: I0123 08:44:59.356885 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:44:59 crc kubenswrapper[4711]: I0123 08:44:59.356955 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:44:59 crc kubenswrapper[4711]: I0123 08:44:59.802494 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerStarted","Data":"3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058"} Jan 23 08:44:59 crc kubenswrapper[4711]: I0123 08:44:59.843111 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6" podStartSLOduration=4.843088296 podStartE2EDuration="4.843088296s" podCreationTimestamp="2026-01-23 08:44:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:44:59.835228804 +0000 UTC m=+1485.408185172" watchObservedRunningTime="2026-01-23 08:44:59.843088296 +0000 UTC m=+1485.416044664" Jan 23 08:45:00 crc kubenswrapper[4711]: I0123 08:45:00.139882 4711 kubelet.go:2421] 
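
The Startup probe failures above imply an HTTP GET probe against the metadata port (path /, port 8775), the same mechanism as the liveness probe on :8798/health that just got machine-config-daemon killed with its 600 s grace period and restarted. A hedged reconstruction of that probe shape with the Kubernetes API types; only the URL and outcome appear in this log, so the timing fields below are placeholders, not the operator's real values:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

func main() {
	probe := &corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			// Endpoint taken from the failure output "http://10.217.0.138:8775/".
			HTTPGet: &corev1.HTTPGetAction{Path: "/", Port: intstr.FromInt(8775)},
		},
		PeriodSeconds:    5,  // placeholder
		FailureThreshold: 12, // placeholder
	}
	fmt.Printf("startup probe: GET :%s%s, period=%ds, failureThreshold=%d\n",
		probe.HTTPGet.Port.String(), probe.HTTPGet.Path,
		probe.PeriodSeconds, probe.FailureThreshold)
}
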
"SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29485965-4g2lp"] Jan 23 08:45:00 crc kubenswrapper[4711]: I0123 08:45:00.141393 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29485965-4g2lp" Jan 23 08:45:00 crc kubenswrapper[4711]: I0123 08:45:00.143782 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 23 08:45:00 crc kubenswrapper[4711]: I0123 08:45:00.151409 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 23 08:45:00 crc kubenswrapper[4711]: I0123 08:45:00.159977 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29485965-4g2lp"] Jan 23 08:45:00 crc kubenswrapper[4711]: I0123 08:45:00.243804 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ea9a5cca-88df-4697-adab-48a747b72560-config-volume\") pod \"collect-profiles-29485965-4g2lp\" (UID: \"ea9a5cca-88df-4697-adab-48a747b72560\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485965-4g2lp" Jan 23 08:45:00 crc kubenswrapper[4711]: I0123 08:45:00.243918 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6cx9\" (UniqueName: \"kubernetes.io/projected/ea9a5cca-88df-4697-adab-48a747b72560-kube-api-access-m6cx9\") pod \"collect-profiles-29485965-4g2lp\" (UID: \"ea9a5cca-88df-4697-adab-48a747b72560\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485965-4g2lp" Jan 23 08:45:00 crc kubenswrapper[4711]: I0123 08:45:00.243969 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ea9a5cca-88df-4697-adab-48a747b72560-secret-volume\") pod \"collect-profiles-29485965-4g2lp\" (UID: \"ea9a5cca-88df-4697-adab-48a747b72560\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485965-4g2lp" Jan 23 08:45:00 crc kubenswrapper[4711]: I0123 08:45:00.345301 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ea9a5cca-88df-4697-adab-48a747b72560-config-volume\") pod \"collect-profiles-29485965-4g2lp\" (UID: \"ea9a5cca-88df-4697-adab-48a747b72560\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485965-4g2lp" Jan 23 08:45:00 crc kubenswrapper[4711]: I0123 08:45:00.345406 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6cx9\" (UniqueName: \"kubernetes.io/projected/ea9a5cca-88df-4697-adab-48a747b72560-kube-api-access-m6cx9\") pod \"collect-profiles-29485965-4g2lp\" (UID: \"ea9a5cca-88df-4697-adab-48a747b72560\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485965-4g2lp" Jan 23 08:45:00 crc kubenswrapper[4711]: I0123 08:45:00.345443 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ea9a5cca-88df-4697-adab-48a747b72560-secret-volume\") pod \"collect-profiles-29485965-4g2lp\" (UID: \"ea9a5cca-88df-4697-adab-48a747b72560\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485965-4g2lp" Jan 23 08:45:00 crc 
kubenswrapper[4711]: I0123 08:45:00.346429 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ea9a5cca-88df-4697-adab-48a747b72560-config-volume\") pod \"collect-profiles-29485965-4g2lp\" (UID: \"ea9a5cca-88df-4697-adab-48a747b72560\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485965-4g2lp" Jan 23 08:45:00 crc kubenswrapper[4711]: I0123 08:45:00.358827 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ea9a5cca-88df-4697-adab-48a747b72560-secret-volume\") pod \"collect-profiles-29485965-4g2lp\" (UID: \"ea9a5cca-88df-4697-adab-48a747b72560\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485965-4g2lp" Jan 23 08:45:00 crc kubenswrapper[4711]: I0123 08:45:00.367337 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6cx9\" (UniqueName: \"kubernetes.io/projected/ea9a5cca-88df-4697-adab-48a747b72560-kube-api-access-m6cx9\") pod \"collect-profiles-29485965-4g2lp\" (UID: \"ea9a5cca-88df-4697-adab-48a747b72560\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485965-4g2lp" Jan 23 08:45:00 crc kubenswrapper[4711]: I0123 08:45:00.399764 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="e3575122-fbcd-4969-a48e-7fb5660c20af" containerName="nova-kuttl-api-log" probeResult="failure" output="Get \"http://10.217.0.139:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:45:00 crc kubenswrapper[4711]: I0123 08:45:00.399764 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="e3575122-fbcd-4969-a48e-7fb5660c20af" containerName="nova-kuttl-api-api" probeResult="failure" output="Get \"http://10.217.0.139:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:45:00 crc kubenswrapper[4711]: I0123 08:45:00.480785 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29485965-4g2lp" Jan 23 08:45:00 crc kubenswrapper[4711]: I0123 08:45:00.928930 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29485965-4g2lp"] Jan 23 08:45:01 crc kubenswrapper[4711]: I0123 08:45:01.817191 4711 generic.go:334] "Generic (PLEG): container finished" podID="ea9a5cca-88df-4697-adab-48a747b72560" containerID="e20ce9b128ea4569e034213e519ee0288101d7426ab76943c09dddff573c0f04" exitCode=0 Jan 23 08:45:01 crc kubenswrapper[4711]: I0123 08:45:01.817254 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29485965-4g2lp" event={"ID":"ea9a5cca-88df-4697-adab-48a747b72560","Type":"ContainerDied","Data":"e20ce9b128ea4569e034213e519ee0288101d7426ab76943c09dddff573c0f04"} Jan 23 08:45:01 crc kubenswrapper[4711]: I0123 08:45:01.817323 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29485965-4g2lp" event={"ID":"ea9a5cca-88df-4697-adab-48a747b72560","Type":"ContainerStarted","Data":"594e486964b72449a819c2f78b4bba7367ce41bfabbce12fb7bb2ab240aa614e"} Jan 23 08:45:03 crc kubenswrapper[4711]: I0123 08:45:03.175295 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29485965-4g2lp" Jan 23 08:45:03 crc kubenswrapper[4711]: I0123 08:45:03.198904 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ea9a5cca-88df-4697-adab-48a747b72560-secret-volume\") pod \"ea9a5cca-88df-4697-adab-48a747b72560\" (UID: \"ea9a5cca-88df-4697-adab-48a747b72560\") " Jan 23 08:45:03 crc kubenswrapper[4711]: I0123 08:45:03.199019 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6cx9\" (UniqueName: \"kubernetes.io/projected/ea9a5cca-88df-4697-adab-48a747b72560-kube-api-access-m6cx9\") pod \"ea9a5cca-88df-4697-adab-48a747b72560\" (UID: \"ea9a5cca-88df-4697-adab-48a747b72560\") " Jan 23 08:45:03 crc kubenswrapper[4711]: I0123 08:45:03.199050 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ea9a5cca-88df-4697-adab-48a747b72560-config-volume\") pod \"ea9a5cca-88df-4697-adab-48a747b72560\" (UID: \"ea9a5cca-88df-4697-adab-48a747b72560\") " Jan 23 08:45:03 crc kubenswrapper[4711]: I0123 08:45:03.199895 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea9a5cca-88df-4697-adab-48a747b72560-config-volume" (OuterVolumeSpecName: "config-volume") pod "ea9a5cca-88df-4697-adab-48a747b72560" (UID: "ea9a5cca-88df-4697-adab-48a747b72560"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:45:03 crc kubenswrapper[4711]: I0123 08:45:03.205348 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea9a5cca-88df-4697-adab-48a747b72560-kube-api-access-m6cx9" (OuterVolumeSpecName: "kube-api-access-m6cx9") pod "ea9a5cca-88df-4697-adab-48a747b72560" (UID: "ea9a5cca-88df-4697-adab-48a747b72560"). InnerVolumeSpecName "kube-api-access-m6cx9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:45:03 crc kubenswrapper[4711]: I0123 08:45:03.206013 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea9a5cca-88df-4697-adab-48a747b72560-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ea9a5cca-88df-4697-adab-48a747b72560" (UID: "ea9a5cca-88df-4697-adab-48a747b72560"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:45:03 crc kubenswrapper[4711]: I0123 08:45:03.300570 4711 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ea9a5cca-88df-4697-adab-48a747b72560-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 23 08:45:03 crc kubenswrapper[4711]: I0123 08:45:03.300603 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6cx9\" (UniqueName: \"kubernetes.io/projected/ea9a5cca-88df-4697-adab-48a747b72560-kube-api-access-m6cx9\") on node \"crc\" DevicePath \"\"" Jan 23 08:45:03 crc kubenswrapper[4711]: I0123 08:45:03.300612 4711 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ea9a5cca-88df-4697-adab-48a747b72560-config-volume\") on node \"crc\" DevicePath \"\"" Jan 23 08:45:03 crc kubenswrapper[4711]: I0123 08:45:03.831798 4711 generic.go:334] "Generic (PLEG): container finished" podID="7f49a860-f013-4823-864f-0e09c4d1211c" containerID="ed7b60ce9b29bee0a8312b2bc86772c7b05819903c8a4a5250a2ea2367f849d7" exitCode=0 Jan 23 08:45:03 crc kubenswrapper[4711]: I0123 08:45:03.831863 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6" event={"ID":"7f49a860-f013-4823-864f-0e09c4d1211c","Type":"ContainerDied","Data":"ed7b60ce9b29bee0a8312b2bc86772c7b05819903c8a4a5250a2ea2367f849d7"} Jan 23 08:45:03 crc kubenswrapper[4711]: I0123 08:45:03.835460 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29485965-4g2lp" event={"ID":"ea9a5cca-88df-4697-adab-48a747b72560","Type":"ContainerDied","Data":"594e486964b72449a819c2f78b4bba7367ce41bfabbce12fb7bb2ab240aa614e"} Jan 23 08:45:03 crc kubenswrapper[4711]: I0123 08:45:03.835516 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="594e486964b72449a819c2f78b4bba7367ce41bfabbce12fb7bb2ab240aa614e" Jan 23 08:45:03 crc kubenswrapper[4711]: I0123 08:45:03.835521 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29485965-4g2lp" Jan 23 08:45:05 crc kubenswrapper[4711]: I0123 08:45:05.159818 4711 util.go:48] "No ready sandbox for pod can be found. 
Jan 23 08:45:05 crc kubenswrapper[4711]: I0123 08:45:05.231684 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f49a860-f013-4823-864f-0e09c4d1211c-config-data\") pod \"7f49a860-f013-4823-864f-0e09c4d1211c\" (UID: \"7f49a860-f013-4823-864f-0e09c4d1211c\") "
Jan 23 08:45:05 crc kubenswrapper[4711]: I0123 08:45:05.231742 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f49a860-f013-4823-864f-0e09c4d1211c-scripts\") pod \"7f49a860-f013-4823-864f-0e09c4d1211c\" (UID: \"7f49a860-f013-4823-864f-0e09c4d1211c\") "
Jan 23 08:45:05 crc kubenswrapper[4711]: I0123 08:45:05.231800 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwsjp\" (UniqueName: \"kubernetes.io/projected/7f49a860-f013-4823-864f-0e09c4d1211c-kube-api-access-wwsjp\") pod \"7f49a860-f013-4823-864f-0e09c4d1211c\" (UID: \"7f49a860-f013-4823-864f-0e09c4d1211c\") "
Jan 23 08:45:05 crc kubenswrapper[4711]: I0123 08:45:05.238021 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f49a860-f013-4823-864f-0e09c4d1211c-scripts" (OuterVolumeSpecName: "scripts") pod "7f49a860-f013-4823-864f-0e09c4d1211c" (UID: "7f49a860-f013-4823-864f-0e09c4d1211c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:45:05 crc kubenswrapper[4711]: I0123 08:45:05.238565 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f49a860-f013-4823-864f-0e09c4d1211c-kube-api-access-wwsjp" (OuterVolumeSpecName: "kube-api-access-wwsjp") pod "7f49a860-f013-4823-864f-0e09c4d1211c" (UID: "7f49a860-f013-4823-864f-0e09c4d1211c"). InnerVolumeSpecName "kube-api-access-wwsjp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:45:05 crc kubenswrapper[4711]: I0123 08:45:05.254129 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f49a860-f013-4823-864f-0e09c4d1211c-config-data" (OuterVolumeSpecName: "config-data") pod "7f49a860-f013-4823-864f-0e09c4d1211c" (UID: "7f49a860-f013-4823-864f-0e09c4d1211c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:45:05 crc kubenswrapper[4711]: I0123 08:45:05.333405 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f49a860-f013-4823-864f-0e09c4d1211c-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:45:05 crc kubenswrapper[4711]: I0123 08:45:05.333445 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f49a860-f013-4823-864f-0e09c4d1211c-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:45:05 crc kubenswrapper[4711]: I0123 08:45:05.333461 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwsjp\" (UniqueName: \"kubernetes.io/projected/7f49a860-f013-4823-864f-0e09c4d1211c-kube-api-access-wwsjp\") on node \"crc\" DevicePath \"\"" Jan 23 08:45:05 crc kubenswrapper[4711]: I0123 08:45:05.854139 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6" event={"ID":"7f49a860-f013-4823-864f-0e09c4d1211c","Type":"ContainerDied","Data":"7daaef35efdeecac6f756234a6a6b5f6e562cc98668aae2f8ea3b308e58de9c2"} Jan 23 08:45:05 crc kubenswrapper[4711]: I0123 08:45:05.854177 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7daaef35efdeecac6f756234a6a6b5f6e562cc98668aae2f8ea3b308e58de9c2" Jan 23 08:45:05 crc kubenswrapper[4711]: I0123 08:45:05.854199 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6" Jan 23 08:45:06 crc kubenswrapper[4711]: I0123 08:45:06.035091 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:45:06 crc kubenswrapper[4711]: I0123 08:45:06.035388 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="e3575122-fbcd-4969-a48e-7fb5660c20af" containerName="nova-kuttl-api-log" containerID="cri-o://2d81721bf3cc308deff0de20f3e3feee41904c3a616918043a7bf1dec05ffcfa" gracePeriod=30 Jan 23 08:45:06 crc kubenswrapper[4711]: I0123 08:45:06.035542 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="e3575122-fbcd-4969-a48e-7fb5660c20af" containerName="nova-kuttl-api-api" containerID="cri-o://d7622962c7fb29d8579cf8fdc8b6320892bc93a11aa583e22b2c86261a1d249d" gracePeriod=30 Jan 23 08:45:06 crc kubenswrapper[4711]: I0123 08:45:06.052691 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:45:06 crc kubenswrapper[4711]: I0123 08:45:06.053321 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podUID="3b12f593-a449-4aa0-90a3-eabdd2e57dee" containerName="nova-kuttl-scheduler-scheduler" containerID="cri-o://caf510aa61b34fdecfc8d38685fcb2c1937ac21df5902ad1b0dd70a671bee7d7" gracePeriod=30 Jan 23 08:45:06 crc kubenswrapper[4711]: I0123 08:45:06.162293 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:45:06 crc kubenswrapper[4711]: I0123 08:45:06.162592 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="2d782917-e4f6-44e9-baa7-402c86c6013e" containerName="nova-kuttl-metadata-log" 
containerID="cri-o://88aeccc962b3004b95e9da36d4625f508137ad70b885726c584ab925a1bceff1" gracePeriod=30 Jan 23 08:45:06 crc kubenswrapper[4711]: I0123 08:45:06.162723 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="2d782917-e4f6-44e9-baa7-402c86c6013e" containerName="nova-kuttl-metadata-metadata" containerID="cri-o://047d2aec931c5ecd051c9459dd06adf45a67679f9cc9e1f2915f8658e1d8e27a" gracePeriod=30 Jan 23 08:45:06 crc kubenswrapper[4711]: I0123 08:45:06.863134 4711 generic.go:334] "Generic (PLEG): container finished" podID="2d782917-e4f6-44e9-baa7-402c86c6013e" containerID="88aeccc962b3004b95e9da36d4625f508137ad70b885726c584ab925a1bceff1" exitCode=143 Jan 23 08:45:06 crc kubenswrapper[4711]: I0123 08:45:06.863223 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"2d782917-e4f6-44e9-baa7-402c86c6013e","Type":"ContainerDied","Data":"88aeccc962b3004b95e9da36d4625f508137ad70b885726c584ab925a1bceff1"} Jan 23 08:45:06 crc kubenswrapper[4711]: I0123 08:45:06.865371 4711 generic.go:334] "Generic (PLEG): container finished" podID="e3575122-fbcd-4969-a48e-7fb5660c20af" containerID="2d81721bf3cc308deff0de20f3e3feee41904c3a616918043a7bf1dec05ffcfa" exitCode=143 Jan 23 08:45:06 crc kubenswrapper[4711]: I0123 08:45:06.865437 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"e3575122-fbcd-4969-a48e-7fb5660c20af","Type":"ContainerDied","Data":"2d81721bf3cc308deff0de20f3e3feee41904c3a616918043a7bf1dec05ffcfa"} Jan 23 08:45:07 crc kubenswrapper[4711]: E0123 08:45:07.330670 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="caf510aa61b34fdecfc8d38685fcb2c1937ac21df5902ad1b0dd70a671bee7d7" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:45:07 crc kubenswrapper[4711]: E0123 08:45:07.332353 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="caf510aa61b34fdecfc8d38685fcb2c1937ac21df5902ad1b0dd70a671bee7d7" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:45:07 crc kubenswrapper[4711]: E0123 08:45:07.333454 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="caf510aa61b34fdecfc8d38685fcb2c1937ac21df5902ad1b0dd70a671bee7d7" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:45:07 crc kubenswrapper[4711]: E0123 08:45:07.333515 4711 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podUID="3b12f593-a449-4aa0-90a3-eabdd2e57dee" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.681820 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.689860 4711 util.go:48] "No ready sandbox for pod can be found. 
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.694858 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.703441 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d782917-e4f6-44e9-baa7-402c86c6013e-config-data\") pod \"2d782917-e4f6-44e9-baa7-402c86c6013e\" (UID: \"2d782917-e4f6-44e9-baa7-402c86c6013e\") "
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.703595 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3575122-fbcd-4969-a48e-7fb5660c20af-config-data\") pod \"e3575122-fbcd-4969-a48e-7fb5660c20af\" (UID: \"e3575122-fbcd-4969-a48e-7fb5660c20af\") "
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.703617 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85qrr\" (UniqueName: \"kubernetes.io/projected/e3575122-fbcd-4969-a48e-7fb5660c20af-kube-api-access-85qrr\") pod \"e3575122-fbcd-4969-a48e-7fb5660c20af\" (UID: \"e3575122-fbcd-4969-a48e-7fb5660c20af\") "
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.703669 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b12f593-a449-4aa0-90a3-eabdd2e57dee-config-data\") pod \"3b12f593-a449-4aa0-90a3-eabdd2e57dee\" (UID: \"3b12f593-a449-4aa0-90a3-eabdd2e57dee\") "
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.703714 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhbd6\" (UniqueName: \"kubernetes.io/projected/2d782917-e4f6-44e9-baa7-402c86c6013e-kube-api-access-vhbd6\") pod \"2d782917-e4f6-44e9-baa7-402c86c6013e\" (UID: \"2d782917-e4f6-44e9-baa7-402c86c6013e\") "
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.703738 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zns82\" (UniqueName: \"kubernetes.io/projected/3b12f593-a449-4aa0-90a3-eabdd2e57dee-kube-api-access-zns82\") pod \"3b12f593-a449-4aa0-90a3-eabdd2e57dee\" (UID: \"3b12f593-a449-4aa0-90a3-eabdd2e57dee\") "
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.703756 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3575122-fbcd-4969-a48e-7fb5660c20af-logs\") pod \"e3575122-fbcd-4969-a48e-7fb5660c20af\" (UID: \"e3575122-fbcd-4969-a48e-7fb5660c20af\") "
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.703773 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d782917-e4f6-44e9-baa7-402c86c6013e-logs\") pod \"2d782917-e4f6-44e9-baa7-402c86c6013e\" (UID: \"2d782917-e4f6-44e9-baa7-402c86c6013e\") "
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.704471 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d782917-e4f6-44e9-baa7-402c86c6013e-logs" (OuterVolumeSpecName: "logs") pod "2d782917-e4f6-44e9-baa7-402c86c6013e" (UID: "2d782917-e4f6-44e9-baa7-402c86c6013e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.708574 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3575122-fbcd-4969-a48e-7fb5660c20af-logs" (OuterVolumeSpecName: "logs") pod "e3575122-fbcd-4969-a48e-7fb5660c20af" (UID: "e3575122-fbcd-4969-a48e-7fb5660c20af"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.711280 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b12f593-a449-4aa0-90a3-eabdd2e57dee-kube-api-access-zns82" (OuterVolumeSpecName: "kube-api-access-zns82") pod "3b12f593-a449-4aa0-90a3-eabdd2e57dee" (UID: "3b12f593-a449-4aa0-90a3-eabdd2e57dee"). InnerVolumeSpecName "kube-api-access-zns82". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.712165 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3575122-fbcd-4969-a48e-7fb5660c20af-kube-api-access-85qrr" (OuterVolumeSpecName: "kube-api-access-85qrr") pod "e3575122-fbcd-4969-a48e-7fb5660c20af" (UID: "e3575122-fbcd-4969-a48e-7fb5660c20af"). InnerVolumeSpecName "kube-api-access-85qrr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.716153 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d782917-e4f6-44e9-baa7-402c86c6013e-kube-api-access-vhbd6" (OuterVolumeSpecName: "kube-api-access-vhbd6") pod "2d782917-e4f6-44e9-baa7-402c86c6013e" (UID: "2d782917-e4f6-44e9-baa7-402c86c6013e"). InnerVolumeSpecName "kube-api-access-vhbd6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.734225 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3575122-fbcd-4969-a48e-7fb5660c20af-config-data" (OuterVolumeSpecName: "config-data") pod "e3575122-fbcd-4969-a48e-7fb5660c20af" (UID: "e3575122-fbcd-4969-a48e-7fb5660c20af"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.741968 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b12f593-a449-4aa0-90a3-eabdd2e57dee-config-data" (OuterVolumeSpecName: "config-data") pod "3b12f593-a449-4aa0-90a3-eabdd2e57dee" (UID: "3b12f593-a449-4aa0-90a3-eabdd2e57dee"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.773032 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d782917-e4f6-44e9-baa7-402c86c6013e-config-data" (OuterVolumeSpecName: "config-data") pod "2d782917-e4f6-44e9-baa7-402c86c6013e" (UID: "2d782917-e4f6-44e9-baa7-402c86c6013e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.805781 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3575122-fbcd-4969-a48e-7fb5660c20af-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.805811 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85qrr\" (UniqueName: \"kubernetes.io/projected/e3575122-fbcd-4969-a48e-7fb5660c20af-kube-api-access-85qrr\") on node \"crc\" DevicePath \"\"" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.805823 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b12f593-a449-4aa0-90a3-eabdd2e57dee-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.805832 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhbd6\" (UniqueName: \"kubernetes.io/projected/2d782917-e4f6-44e9-baa7-402c86c6013e-kube-api-access-vhbd6\") on node \"crc\" DevicePath \"\"" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.805840 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zns82\" (UniqueName: \"kubernetes.io/projected/3b12f593-a449-4aa0-90a3-eabdd2e57dee-kube-api-access-zns82\") on node \"crc\" DevicePath \"\"" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.805849 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3575122-fbcd-4969-a48e-7fb5660c20af-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.805858 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d782917-e4f6-44e9-baa7-402c86c6013e-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.805866 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d782917-e4f6-44e9-baa7-402c86c6013e-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.891036 4711 generic.go:334] "Generic (PLEG): container finished" podID="3b12f593-a449-4aa0-90a3-eabdd2e57dee" containerID="caf510aa61b34fdecfc8d38685fcb2c1937ac21df5902ad1b0dd70a671bee7d7" exitCode=0 Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.891111 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"3b12f593-a449-4aa0-90a3-eabdd2e57dee","Type":"ContainerDied","Data":"caf510aa61b34fdecfc8d38685fcb2c1937ac21df5902ad1b0dd70a671bee7d7"} Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.891675 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"3b12f593-a449-4aa0-90a3-eabdd2e57dee","Type":"ContainerDied","Data":"dd6009abd4e08a18e2f9f93e297199c14474e1afe6aa15c677fd37cdcb77080f"} Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.891125 4711 util.go:48] "No ready sandbox for pod can be found. 
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.891714 4711 scope.go:117] "RemoveContainer" containerID="caf510aa61b34fdecfc8d38685fcb2c1937ac21df5902ad1b0dd70a671bee7d7"
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.895799 4711 generic.go:334] "Generic (PLEG): container finished" podID="2d782917-e4f6-44e9-baa7-402c86c6013e" containerID="047d2aec931c5ecd051c9459dd06adf45a67679f9cc9e1f2915f8658e1d8e27a" exitCode=0
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.895861 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"2d782917-e4f6-44e9-baa7-402c86c6013e","Type":"ContainerDied","Data":"047d2aec931c5ecd051c9459dd06adf45a67679f9cc9e1f2915f8658e1d8e27a"}
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.895886 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"2d782917-e4f6-44e9-baa7-402c86c6013e","Type":"ContainerDied","Data":"10b078f97b3c4eb4e5fe1f01c25ad4d1b242fc79b78321443f41e473afa45807"}
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.895941 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.898412 4711 generic.go:334] "Generic (PLEG): container finished" podID="e3575122-fbcd-4969-a48e-7fb5660c20af" containerID="d7622962c7fb29d8579cf8fdc8b6320892bc93a11aa583e22b2c86261a1d249d" exitCode=0
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.898452 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"e3575122-fbcd-4969-a48e-7fb5660c20af","Type":"ContainerDied","Data":"d7622962c7fb29d8579cf8fdc8b6320892bc93a11aa583e22b2c86261a1d249d"}
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.898469 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.898483 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"e3575122-fbcd-4969-a48e-7fb5660c20af","Type":"ContainerDied","Data":"6bc01566fba1427fa5b2a704ef7a1f9d26b01f798629bb6c38c6e277e7c8fa0d"}
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.916103 4711 scope.go:117] "RemoveContainer" containerID="caf510aa61b34fdecfc8d38685fcb2c1937ac21df5902ad1b0dd70a671bee7d7"
Jan 23 08:45:09 crc kubenswrapper[4711]: E0123 08:45:09.916448 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"caf510aa61b34fdecfc8d38685fcb2c1937ac21df5902ad1b0dd70a671bee7d7\": container with ID starting with caf510aa61b34fdecfc8d38685fcb2c1937ac21df5902ad1b0dd70a671bee7d7 not found: ID does not exist" containerID="caf510aa61b34fdecfc8d38685fcb2c1937ac21df5902ad1b0dd70a671bee7d7"
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.916475 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"caf510aa61b34fdecfc8d38685fcb2c1937ac21df5902ad1b0dd70a671bee7d7"} err="failed to get container status \"caf510aa61b34fdecfc8d38685fcb2c1937ac21df5902ad1b0dd70a671bee7d7\": rpc error: code = NotFound desc = could not find container \"caf510aa61b34fdecfc8d38685fcb2c1937ac21df5902ad1b0dd70a671bee7d7\": container with ID starting with caf510aa61b34fdecfc8d38685fcb2c1937ac21df5902ad1b0dd70a671bee7d7 not found: ID does not exist"
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.916522 4711 scope.go:117] "RemoveContainer" containerID="047d2aec931c5ecd051c9459dd06adf45a67679f9cc9e1f2915f8658e1d8e27a"
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.933629 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"]
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.940924 4711 scope.go:117] "RemoveContainer" containerID="88aeccc962b3004b95e9da36d4625f508137ad70b885726c584ab925a1bceff1"
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.956894 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"]
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.968647 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"]
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.969426 4711 scope.go:117] "RemoveContainer" containerID="047d2aec931c5ecd051c9459dd06adf45a67679f9cc9e1f2915f8658e1d8e27a"
Jan 23 08:45:09 crc kubenswrapper[4711]: E0123 08:45:09.969860 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"047d2aec931c5ecd051c9459dd06adf45a67679f9cc9e1f2915f8658e1d8e27a\": container with ID starting with 047d2aec931c5ecd051c9459dd06adf45a67679f9cc9e1f2915f8658e1d8e27a not found: ID does not exist" containerID="047d2aec931c5ecd051c9459dd06adf45a67679f9cc9e1f2915f8658e1d8e27a"
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.969889 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"047d2aec931c5ecd051c9459dd06adf45a67679f9cc9e1f2915f8658e1d8e27a"} err="failed to get container status \"047d2aec931c5ecd051c9459dd06adf45a67679f9cc9e1f2915f8658e1d8e27a\": rpc error: code = NotFound desc = could not find container \"047d2aec931c5ecd051c9459dd06adf45a67679f9cc9e1f2915f8658e1d8e27a\": container with ID starting with 047d2aec931c5ecd051c9459dd06adf45a67679f9cc9e1f2915f8658e1d8e27a not found: ID does not exist"
\"047d2aec931c5ecd051c9459dd06adf45a67679f9cc9e1f2915f8658e1d8e27a\": container with ID starting with 047d2aec931c5ecd051c9459dd06adf45a67679f9cc9e1f2915f8658e1d8e27a not found: ID does not exist" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.969909 4711 scope.go:117] "RemoveContainer" containerID="88aeccc962b3004b95e9da36d4625f508137ad70b885726c584ab925a1bceff1" Jan 23 08:45:09 crc kubenswrapper[4711]: E0123 08:45:09.970599 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88aeccc962b3004b95e9da36d4625f508137ad70b885726c584ab925a1bceff1\": container with ID starting with 88aeccc962b3004b95e9da36d4625f508137ad70b885726c584ab925a1bceff1 not found: ID does not exist" containerID="88aeccc962b3004b95e9da36d4625f508137ad70b885726c584ab925a1bceff1" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.970622 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88aeccc962b3004b95e9da36d4625f508137ad70b885726c584ab925a1bceff1"} err="failed to get container status \"88aeccc962b3004b95e9da36d4625f508137ad70b885726c584ab925a1bceff1\": rpc error: code = NotFound desc = could not find container \"88aeccc962b3004b95e9da36d4625f508137ad70b885726c584ab925a1bceff1\": container with ID starting with 88aeccc962b3004b95e9da36d4625f508137ad70b885726c584ab925a1bceff1 not found: ID does not exist" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.970636 4711 scope.go:117] "RemoveContainer" containerID="d7622962c7fb29d8579cf8fdc8b6320892bc93a11aa583e22b2c86261a1d249d" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.977891 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.985728 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:45:09 crc kubenswrapper[4711]: E0123 08:45:09.986280 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d782917-e4f6-44e9-baa7-402c86c6013e" containerName="nova-kuttl-metadata-log" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.986322 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d782917-e4f6-44e9-baa7-402c86c6013e" containerName="nova-kuttl-metadata-log" Jan 23 08:45:09 crc kubenswrapper[4711]: E0123 08:45:09.986334 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b12f593-a449-4aa0-90a3-eabdd2e57dee" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.986340 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b12f593-a449-4aa0-90a3-eabdd2e57dee" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:45:09 crc kubenswrapper[4711]: E0123 08:45:09.986354 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f49a860-f013-4823-864f-0e09c4d1211c" containerName="nova-manage" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.986362 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f49a860-f013-4823-864f-0e09c4d1211c" containerName="nova-manage" Jan 23 08:45:09 crc kubenswrapper[4711]: E0123 08:45:09.986377 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3575122-fbcd-4969-a48e-7fb5660c20af" containerName="nova-kuttl-api-api" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.986411 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3575122-fbcd-4969-a48e-7fb5660c20af" 
containerName="nova-kuttl-api-api" Jan 23 08:45:09 crc kubenswrapper[4711]: E0123 08:45:09.986425 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3575122-fbcd-4969-a48e-7fb5660c20af" containerName="nova-kuttl-api-log" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.986433 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3575122-fbcd-4969-a48e-7fb5660c20af" containerName="nova-kuttl-api-log" Jan 23 08:45:09 crc kubenswrapper[4711]: E0123 08:45:09.986443 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d782917-e4f6-44e9-baa7-402c86c6013e" containerName="nova-kuttl-metadata-metadata" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.986450 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d782917-e4f6-44e9-baa7-402c86c6013e" containerName="nova-kuttl-metadata-metadata" Jan 23 08:45:09 crc kubenswrapper[4711]: E0123 08:45:09.986523 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea9a5cca-88df-4697-adab-48a747b72560" containerName="collect-profiles" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.986536 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea9a5cca-88df-4697-adab-48a747b72560" containerName="collect-profiles" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.986731 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d782917-e4f6-44e9-baa7-402c86c6013e" containerName="nova-kuttl-metadata-metadata" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.986762 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f49a860-f013-4823-864f-0e09c4d1211c" containerName="nova-manage" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.986773 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea9a5cca-88df-4697-adab-48a747b72560" containerName="collect-profiles" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.986786 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b12f593-a449-4aa0-90a3-eabdd2e57dee" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.986796 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d782917-e4f6-44e9-baa7-402c86c6013e" containerName="nova-kuttl-metadata-log" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.986804 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3575122-fbcd-4969-a48e-7fb5660c20af" containerName="nova-kuttl-api-api" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.986813 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3575122-fbcd-4969-a48e-7fb5660c20af" containerName="nova-kuttl-api-log" Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.989176 4711 util.go:30] "No sandbox for pod can be found. 
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.996142 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-scheduler-config-data"
Jan 23 08:45:09 crc kubenswrapper[4711]: I0123 08:45:09.996850 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"]
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.005637 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"]
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.006183 4711 scope.go:117] "RemoveContainer" containerID="2d81721bf3cc308deff0de20f3e3feee41904c3a616918043a7bf1dec05ffcfa"
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.007649 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6dwq\" (UniqueName: \"kubernetes.io/projected/30a3b0ef-6b8e-4f03-9f82-3e139cdc315a-kube-api-access-j6dwq\") pod \"nova-kuttl-scheduler-0\" (UID: \"30a3b0ef-6b8e-4f03-9f82-3e139cdc315a\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.007730 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30a3b0ef-6b8e-4f03-9f82-3e139cdc315a-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"30a3b0ef-6b8e-4f03-9f82-3e139cdc315a\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.017387 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"]
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.018773 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.023648 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"]
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.033846 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-metadata-config-data"
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.042164 4711 scope.go:117] "RemoveContainer" containerID="d7622962c7fb29d8579cf8fdc8b6320892bc93a11aa583e22b2c86261a1d249d"
Jan 23 08:45:10 crc kubenswrapper[4711]: E0123 08:45:10.042668 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7622962c7fb29d8579cf8fdc8b6320892bc93a11aa583e22b2c86261a1d249d\": container with ID starting with d7622962c7fb29d8579cf8fdc8b6320892bc93a11aa583e22b2c86261a1d249d not found: ID does not exist" containerID="d7622962c7fb29d8579cf8fdc8b6320892bc93a11aa583e22b2c86261a1d249d"
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.042719 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7622962c7fb29d8579cf8fdc8b6320892bc93a11aa583e22b2c86261a1d249d"} err="failed to get container status \"d7622962c7fb29d8579cf8fdc8b6320892bc93a11aa583e22b2c86261a1d249d\": rpc error: code = NotFound desc = could not find container \"d7622962c7fb29d8579cf8fdc8b6320892bc93a11aa583e22b2c86261a1d249d\": container with ID starting with d7622962c7fb29d8579cf8fdc8b6320892bc93a11aa583e22b2c86261a1d249d not found: ID does not exist"
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.042745 4711 scope.go:117] "RemoveContainer" containerID="2d81721bf3cc308deff0de20f3e3feee41904c3a616918043a7bf1dec05ffcfa"
Jan 23 08:45:10 crc kubenswrapper[4711]: E0123 08:45:10.044312 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d81721bf3cc308deff0de20f3e3feee41904c3a616918043a7bf1dec05ffcfa\": container with ID starting with 2d81721bf3cc308deff0de20f3e3feee41904c3a616918043a7bf1dec05ffcfa not found: ID does not exist" containerID="2d81721bf3cc308deff0de20f3e3feee41904c3a616918043a7bf1dec05ffcfa"
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.044368 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d81721bf3cc308deff0de20f3e3feee41904c3a616918043a7bf1dec05ffcfa"} err="failed to get container status \"2d81721bf3cc308deff0de20f3e3feee41904c3a616918043a7bf1dec05ffcfa\": rpc error: code = NotFound desc = could not find container \"2d81721bf3cc308deff0de20f3e3feee41904c3a616918043a7bf1dec05ffcfa\": container with ID starting with 2d81721bf3cc308deff0de20f3e3feee41904c3a616918043a7bf1dec05ffcfa not found: ID does not exist"
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.054285 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"]
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.056471 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.058932 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-api-config-data"
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.061498 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"]
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.069072 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"]
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.109527 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6dwq\" (UniqueName: \"kubernetes.io/projected/30a3b0ef-6b8e-4f03-9f82-3e139cdc315a-kube-api-access-j6dwq\") pod \"nova-kuttl-scheduler-0\" (UID: \"30a3b0ef-6b8e-4f03-9f82-3e139cdc315a\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.110277 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30a3b0ef-6b8e-4f03-9f82-3e139cdc315a-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"30a3b0ef-6b8e-4f03-9f82-3e139cdc315a\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.110360 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55nnb\" (UniqueName: \"kubernetes.io/projected/4d962003-fa65-47c9-8ddf-78bfd9e0f514-kube-api-access-55nnb\") pod \"nova-kuttl-api-0\" (UID: \"4d962003-fa65-47c9-8ddf-78bfd9e0f514\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.110387 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d962003-fa65-47c9-8ddf-78bfd9e0f514-config-data\") pod \"nova-kuttl-api-0\" (UID: \"4d962003-fa65-47c9-8ddf-78bfd9e0f514\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.110409 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d962003-fa65-47c9-8ddf-78bfd9e0f514-logs\") pod \"nova-kuttl-api-0\" (UID: \"4d962003-fa65-47c9-8ddf-78bfd9e0f514\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.110687 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96c2ff5d-b1a5-4987-8c97-83550fa5752c-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"96c2ff5d-b1a5-4987-8c97-83550fa5752c\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.110762 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96c2ff5d-b1a5-4987-8c97-83550fa5752c-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"96c2ff5d-b1a5-4987-8c97-83550fa5752c\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.110815 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lh6j\" (UniqueName: \"kubernetes.io/projected/96c2ff5d-b1a5-4987-8c97-83550fa5752c-kube-api-access-6lh6j\") pod \"nova-kuttl-metadata-0\" (UID: \"96c2ff5d-b1a5-4987-8c97-83550fa5752c\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
\"kubernetes.io/projected/96c2ff5d-b1a5-4987-8c97-83550fa5752c-kube-api-access-6lh6j\") pod \"nova-kuttl-metadata-0\" (UID: \"96c2ff5d-b1a5-4987-8c97-83550fa5752c\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.117249 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30a3b0ef-6b8e-4f03-9f82-3e139cdc315a-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"30a3b0ef-6b8e-4f03-9f82-3e139cdc315a\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.127062 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6dwq\" (UniqueName: \"kubernetes.io/projected/30a3b0ef-6b8e-4f03-9f82-3e139cdc315a-kube-api-access-j6dwq\") pod \"nova-kuttl-scheduler-0\" (UID: \"30a3b0ef-6b8e-4f03-9f82-3e139cdc315a\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.211531 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d962003-fa65-47c9-8ddf-78bfd9e0f514-logs\") pod \"nova-kuttl-api-0\" (UID: \"4d962003-fa65-47c9-8ddf-78bfd9e0f514\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.211614 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96c2ff5d-b1a5-4987-8c97-83550fa5752c-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"96c2ff5d-b1a5-4987-8c97-83550fa5752c\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.211636 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96c2ff5d-b1a5-4987-8c97-83550fa5752c-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"96c2ff5d-b1a5-4987-8c97-83550fa5752c\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.211663 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lh6j\" (UniqueName: \"kubernetes.io/projected/96c2ff5d-b1a5-4987-8c97-83550fa5752c-kube-api-access-6lh6j\") pod \"nova-kuttl-metadata-0\" (UID: \"96c2ff5d-b1a5-4987-8c97-83550fa5752c\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.211729 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55nnb\" (UniqueName: \"kubernetes.io/projected/4d962003-fa65-47c9-8ddf-78bfd9e0f514-kube-api-access-55nnb\") pod \"nova-kuttl-api-0\" (UID: \"4d962003-fa65-47c9-8ddf-78bfd9e0f514\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.211751 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d962003-fa65-47c9-8ddf-78bfd9e0f514-config-data\") pod \"nova-kuttl-api-0\" (UID: \"4d962003-fa65-47c9-8ddf-78bfd9e0f514\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.212161 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96c2ff5d-b1a5-4987-8c97-83550fa5752c-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"96c2ff5d-b1a5-4987-8c97-83550fa5752c\") " 
pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.212845 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d962003-fa65-47c9-8ddf-78bfd9e0f514-logs\") pod \"nova-kuttl-api-0\" (UID: \"4d962003-fa65-47c9-8ddf-78bfd9e0f514\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.215926 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d962003-fa65-47c9-8ddf-78bfd9e0f514-config-data\") pod \"nova-kuttl-api-0\" (UID: \"4d962003-fa65-47c9-8ddf-78bfd9e0f514\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.217371 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96c2ff5d-b1a5-4987-8c97-83550fa5752c-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"96c2ff5d-b1a5-4987-8c97-83550fa5752c\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.227205 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55nnb\" (UniqueName: \"kubernetes.io/projected/4d962003-fa65-47c9-8ddf-78bfd9e0f514-kube-api-access-55nnb\") pod \"nova-kuttl-api-0\" (UID: \"4d962003-fa65-47c9-8ddf-78bfd9e0f514\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.227258 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lh6j\" (UniqueName: \"kubernetes.io/projected/96c2ff5d-b1a5-4987-8c97-83550fa5752c-kube-api-access-6lh6j\") pod \"nova-kuttl-metadata-0\" (UID: \"96c2ff5d-b1a5-4987-8c97-83550fa5752c\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.323287 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.339315 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.373768 4711 util.go:30] "No sandbox for pod can be found. 
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.805921 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"]
Jan 23 08:45:10 crc kubenswrapper[4711]: W0123 08:45:10.809145 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30a3b0ef_6b8e_4f03_9f82_3e139cdc315a.slice/crio-d0bba70eb3300983fd4e21d9fbdf6e0b2ced0e4ecb42b73704bfff13549a0f06 WatchSource:0}: Error finding container d0bba70eb3300983fd4e21d9fbdf6e0b2ced0e4ecb42b73704bfff13549a0f06: Status 404 returned error can't find the container with id d0bba70eb3300983fd4e21d9fbdf6e0b2ced0e4ecb42b73704bfff13549a0f06
Jan 23 08:45:10 crc kubenswrapper[4711]: W0123 08:45:10.810040 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96c2ff5d_b1a5_4987_8c97_83550fa5752c.slice/crio-2f403ec9eb5187700c2c53d29130697baf1d98ff62425b1d7e3022a3dfc0d14e WatchSource:0}: Error finding container 2f403ec9eb5187700c2c53d29130697baf1d98ff62425b1d7e3022a3dfc0d14e: Status 404 returned error can't find the container with id 2f403ec9eb5187700c2c53d29130697baf1d98ff62425b1d7e3022a3dfc0d14e
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.816285 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"]
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.920342 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"30a3b0ef-6b8e-4f03-9f82-3e139cdc315a","Type":"ContainerStarted","Data":"d0bba70eb3300983fd4e21d9fbdf6e0b2ced0e4ecb42b73704bfff13549a0f06"}
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.924661 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"96c2ff5d-b1a5-4987-8c97-83550fa5752c","Type":"ContainerStarted","Data":"2f403ec9eb5187700c2c53d29130697baf1d98ff62425b1d7e3022a3dfc0d14e"}
Jan 23 08:45:10 crc kubenswrapper[4711]: I0123 08:45:10.928165 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"]
Jan 23 08:45:10 crc kubenswrapper[4711]: W0123 08:45:10.932464 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4d962003_fa65_47c9_8ddf_78bfd9e0f514.slice/crio-6eb27513b1e7486d36b9a8d6d11b6dd76c6e41d81d512e7f645f6a18b0a12d3c WatchSource:0}: Error finding container 6eb27513b1e7486d36b9a8d6d11b6dd76c6e41d81d512e7f645f6a18b0a12d3c: Status 404 returned error can't find the container with id 6eb27513b1e7486d36b9a8d6d11b6dd76c6e41d81d512e7f645f6a18b0a12d3c
Jan 23 08:45:11 crc kubenswrapper[4711]: I0123 08:45:11.482472 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d782917-e4f6-44e9-baa7-402c86c6013e" path="/var/lib/kubelet/pods/2d782917-e4f6-44e9-baa7-402c86c6013e/volumes"
Jan 23 08:45:11 crc kubenswrapper[4711]: I0123 08:45:11.483326 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b12f593-a449-4aa0-90a3-eabdd2e57dee" path="/var/lib/kubelet/pods/3b12f593-a449-4aa0-90a3-eabdd2e57dee/volumes"
Jan 23 08:45:11 crc kubenswrapper[4711]: I0123 08:45:11.483915 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3575122-fbcd-4969-a48e-7fb5660c20af" path="/var/lib/kubelet/pods/e3575122-fbcd-4969-a48e-7fb5660c20af/volumes"
path="/var/lib/kubelet/pods/e3575122-fbcd-4969-a48e-7fb5660c20af/volumes" Jan 23 08:45:11 crc kubenswrapper[4711]: I0123 08:45:11.934687 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"30a3b0ef-6b8e-4f03-9f82-3e139cdc315a","Type":"ContainerStarted","Data":"c9de21b388770bb7ae32b509c1bb5d48c96b36292e6e77b210717e8987ce2af8"} Jan 23 08:45:11 crc kubenswrapper[4711]: I0123 08:45:11.937213 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"4d962003-fa65-47c9-8ddf-78bfd9e0f514","Type":"ContainerStarted","Data":"03caf64a295062133147f3bb668b62f849d1cc217356ec0a23d574c2771f725d"} Jan 23 08:45:11 crc kubenswrapper[4711]: I0123 08:45:11.937296 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"4d962003-fa65-47c9-8ddf-78bfd9e0f514","Type":"ContainerStarted","Data":"cc9487395b5d4ff32ecb1db5568739c385de8782ea1d5ac1e08a98c154ad4963"} Jan 23 08:45:11 crc kubenswrapper[4711]: I0123 08:45:11.937400 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"4d962003-fa65-47c9-8ddf-78bfd9e0f514","Type":"ContainerStarted","Data":"6eb27513b1e7486d36b9a8d6d11b6dd76c6e41d81d512e7f645f6a18b0a12d3c"} Jan 23 08:45:11 crc kubenswrapper[4711]: I0123 08:45:11.939182 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"96c2ff5d-b1a5-4987-8c97-83550fa5752c","Type":"ContainerStarted","Data":"6f8d6b2022db695313a3f5e2c387143c26303fd55858daf323b2825815f29f59"} Jan 23 08:45:11 crc kubenswrapper[4711]: I0123 08:45:11.939219 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"96c2ff5d-b1a5-4987-8c97-83550fa5752c","Type":"ContainerStarted","Data":"252bb9810181a29a93fc3d4cc247aa119f74257eba9b324725721b9debea0e45"} Jan 23 08:45:11 crc kubenswrapper[4711]: I0123 08:45:11.955854 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podStartSLOduration=2.955838726 podStartE2EDuration="2.955838726s" podCreationTimestamp="2026-01-23 08:45:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:45:11.951468366 +0000 UTC m=+1497.524424734" watchObservedRunningTime="2026-01-23 08:45:11.955838726 +0000 UTC m=+1497.528795094" Jan 23 08:45:11 crc kubenswrapper[4711]: I0123 08:45:11.977060 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-api-0" podStartSLOduration=2.977039134 podStartE2EDuration="2.977039134s" podCreationTimestamp="2026-01-23 08:45:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:45:11.972781466 +0000 UTC m=+1497.545737834" watchObservedRunningTime="2026-01-23 08:45:11.977039134 +0000 UTC m=+1497.549995512" Jan 23 08:45:11 crc kubenswrapper[4711]: I0123 08:45:11.992064 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-metadata-0" podStartSLOduration=2.992047825 podStartE2EDuration="2.992047825s" podCreationTimestamp="2026-01-23 08:45:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 
Jan 23 08:45:15 crc kubenswrapper[4711]: I0123 08:45:15.324346 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:45:15 crc kubenswrapper[4711]: I0123 08:45:15.339815 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:45:15 crc kubenswrapper[4711]: I0123 08:45:15.339881 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:45:20 crc kubenswrapper[4711]: I0123 08:45:20.323806 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:45:20 crc kubenswrapper[4711]: I0123 08:45:20.339653 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:45:20 crc kubenswrapper[4711]: I0123 08:45:20.339718 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:45:20 crc kubenswrapper[4711]: I0123 08:45:20.347993 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:45:20 crc kubenswrapper[4711]: I0123 08:45:20.374406 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:45:20 crc kubenswrapper[4711]: I0123 08:45:20.374499 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:45:21 crc kubenswrapper[4711]: I0123 08:45:21.055173 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:45:21 crc kubenswrapper[4711]: I0123 08:45:21.505693 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="96c2ff5d-b1a5-4987-8c97-83550fa5752c" containerName="nova-kuttl-metadata-metadata" probeResult="failure" output="Get \"http://10.217.0.144:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 23 08:45:21 crc kubenswrapper[4711]: I0123 08:45:21.506222 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="4d962003-fa65-47c9-8ddf-78bfd9e0f514" containerName="nova-kuttl-api-api" probeResult="failure" output="Get \"http://10.217.0.145:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 23 08:45:21 crc kubenswrapper[4711]: I0123 08:45:21.506288 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="96c2ff5d-b1a5-4987-8c97-83550fa5752c" containerName="nova-kuttl-metadata-log" probeResult="failure" output="Get \"http://10.217.0.144:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 23 08:45:21 crc kubenswrapper[4711]: I0123 08:45:21.506331 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="4d962003-fa65-47c9-8ddf-78bfd9e0f514" containerName="nova-kuttl-api-log" probeResult="failure" output="Get \"http://10.217.0.145:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 23 08:45:30 crc kubenswrapper[4711]: I0123 08:45:30.343444 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:45:30 crc kubenswrapper[4711]: I0123 08:45:30.344011 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:45:30 crc kubenswrapper[4711]: I0123 08:45:30.346480 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:45:30 crc kubenswrapper[4711]: I0123 08:45:30.346942 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:45:30 crc kubenswrapper[4711]: I0123 08:45:30.392370 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:45:30 crc kubenswrapper[4711]: I0123 08:45:30.392781 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:45:30 crc kubenswrapper[4711]: I0123 08:45:30.394351 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:45:30 crc kubenswrapper[4711]: I0123 08:45:30.396198 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:45:31 crc kubenswrapper[4711]: I0123 08:45:31.115203 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:45:31 crc kubenswrapper[4711]: I0123 08:45:31.118051 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:46:18 crc kubenswrapper[4711]: I0123 08:46:18.699244 4711 scope.go:117] "RemoveContainer" containerID="e42547e8adbef23aeb04aa8863314e680316841c1b25d88926284f703acba8d1"
Jan 23 08:46:18 crc kubenswrapper[4711]: I0123 08:46:18.726806 4711 scope.go:117] "RemoveContainer" containerID="474c0a893709ccd3774540e8c5f5671626f570c2396bd51bd189fd711a76167e"
Jan 23 08:46:18 crc kubenswrapper[4711]: I0123 08:46:18.765657 4711 scope.go:117] "RemoveContainer" containerID="18adc39da558e753913106afe00dcc3908d8edfd5ff3dfaa5b75d17bf4af4e13"
Jan 23 08:46:18 crc kubenswrapper[4711]: I0123 08:46:18.802174 4711 scope.go:117] "RemoveContainer" containerID="a921f124d3747adbc1d15710fc8da7574d650743d83e854cb2284108913735fa"
Jan 23 08:47:02 crc kubenswrapper[4711]: I0123 08:47:02.139078 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9mfjg"]
Jan 23 08:47:02 crc kubenswrapper[4711]: I0123 08:47:02.141789 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9mfjg"
Need to start a new one" pod="openshift-marketplace/community-operators-9mfjg" Jan 23 08:47:02 crc kubenswrapper[4711]: I0123 08:47:02.148084 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9mfjg"] Jan 23 08:47:02 crc kubenswrapper[4711]: I0123 08:47:02.170684 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/113006f4-8a8d-4213-ab2a-bbf0abd533f6-utilities\") pod \"community-operators-9mfjg\" (UID: \"113006f4-8a8d-4213-ab2a-bbf0abd533f6\") " pod="openshift-marketplace/community-operators-9mfjg" Jan 23 08:47:02 crc kubenswrapper[4711]: I0123 08:47:02.170771 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/113006f4-8a8d-4213-ab2a-bbf0abd533f6-catalog-content\") pod \"community-operators-9mfjg\" (UID: \"113006f4-8a8d-4213-ab2a-bbf0abd533f6\") " pod="openshift-marketplace/community-operators-9mfjg" Jan 23 08:47:02 crc kubenswrapper[4711]: I0123 08:47:02.170880 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z92fk\" (UniqueName: \"kubernetes.io/projected/113006f4-8a8d-4213-ab2a-bbf0abd533f6-kube-api-access-z92fk\") pod \"community-operators-9mfjg\" (UID: \"113006f4-8a8d-4213-ab2a-bbf0abd533f6\") " pod="openshift-marketplace/community-operators-9mfjg" Jan 23 08:47:02 crc kubenswrapper[4711]: I0123 08:47:02.272315 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/113006f4-8a8d-4213-ab2a-bbf0abd533f6-utilities\") pod \"community-operators-9mfjg\" (UID: \"113006f4-8a8d-4213-ab2a-bbf0abd533f6\") " pod="openshift-marketplace/community-operators-9mfjg" Jan 23 08:47:02 crc kubenswrapper[4711]: I0123 08:47:02.272409 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/113006f4-8a8d-4213-ab2a-bbf0abd533f6-catalog-content\") pod \"community-operators-9mfjg\" (UID: \"113006f4-8a8d-4213-ab2a-bbf0abd533f6\") " pod="openshift-marketplace/community-operators-9mfjg" Jan 23 08:47:02 crc kubenswrapper[4711]: I0123 08:47:02.272564 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z92fk\" (UniqueName: \"kubernetes.io/projected/113006f4-8a8d-4213-ab2a-bbf0abd533f6-kube-api-access-z92fk\") pod \"community-operators-9mfjg\" (UID: \"113006f4-8a8d-4213-ab2a-bbf0abd533f6\") " pod="openshift-marketplace/community-operators-9mfjg" Jan 23 08:47:02 crc kubenswrapper[4711]: I0123 08:47:02.272939 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/113006f4-8a8d-4213-ab2a-bbf0abd533f6-utilities\") pod \"community-operators-9mfjg\" (UID: \"113006f4-8a8d-4213-ab2a-bbf0abd533f6\") " pod="openshift-marketplace/community-operators-9mfjg" Jan 23 08:47:02 crc kubenswrapper[4711]: I0123 08:47:02.273025 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/113006f4-8a8d-4213-ab2a-bbf0abd533f6-catalog-content\") pod \"community-operators-9mfjg\" (UID: \"113006f4-8a8d-4213-ab2a-bbf0abd533f6\") " pod="openshift-marketplace/community-operators-9mfjg" Jan 23 08:47:02 crc kubenswrapper[4711]: I0123 08:47:02.298469 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-z92fk\" (UniqueName: \"kubernetes.io/projected/113006f4-8a8d-4213-ab2a-bbf0abd533f6-kube-api-access-z92fk\") pod \"community-operators-9mfjg\" (UID: \"113006f4-8a8d-4213-ab2a-bbf0abd533f6\") " pod="openshift-marketplace/community-operators-9mfjg" Jan 23 08:47:02 crc kubenswrapper[4711]: I0123 08:47:02.462288 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9mfjg" Jan 23 08:47:02 crc kubenswrapper[4711]: I0123 08:47:02.955078 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9mfjg"] Jan 23 08:47:03 crc kubenswrapper[4711]: I0123 08:47:03.862907 4711 generic.go:334] "Generic (PLEG): container finished" podID="113006f4-8a8d-4213-ab2a-bbf0abd533f6" containerID="87db737c82e7d5326534798dcf3931a588de510255f5ff908408298a16b51e14" exitCode=0 Jan 23 08:47:03 crc kubenswrapper[4711]: I0123 08:47:03.863666 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mfjg" event={"ID":"113006f4-8a8d-4213-ab2a-bbf0abd533f6","Type":"ContainerDied","Data":"87db737c82e7d5326534798dcf3931a588de510255f5ff908408298a16b51e14"} Jan 23 08:47:03 crc kubenswrapper[4711]: I0123 08:47:03.863786 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mfjg" event={"ID":"113006f4-8a8d-4213-ab2a-bbf0abd533f6","Type":"ContainerStarted","Data":"26e9505f5389ffdbfe53cf6e154e83c8bc1b219c5f120c4ea00b7de238a9d914"} Jan 23 08:47:05 crc kubenswrapper[4711]: I0123 08:47:05.879185 4711 generic.go:334] "Generic (PLEG): container finished" podID="113006f4-8a8d-4213-ab2a-bbf0abd533f6" containerID="8c91b21a518c72868d138c8cca6328c5d93d097284c80cd8bab2ea94529475d7" exitCode=0 Jan 23 08:47:05 crc kubenswrapper[4711]: I0123 08:47:05.879365 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mfjg" event={"ID":"113006f4-8a8d-4213-ab2a-bbf0abd533f6","Type":"ContainerDied","Data":"8c91b21a518c72868d138c8cca6328c5d93d097284c80cd8bab2ea94529475d7"} Jan 23 08:47:07 crc kubenswrapper[4711]: I0123 08:47:07.898548 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mfjg" event={"ID":"113006f4-8a8d-4213-ab2a-bbf0abd533f6","Type":"ContainerStarted","Data":"2033bff50de35678b4ff83b896ef21e152f903d37a51bf1762e06b7da774fece"} Jan 23 08:47:07 crc kubenswrapper[4711]: I0123 08:47:07.918369 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9mfjg" podStartSLOduration=2.635999778 podStartE2EDuration="5.91834739s" podCreationTimestamp="2026-01-23 08:47:02 +0000 UTC" firstStartedPulling="2026-01-23 08:47:03.868673287 +0000 UTC m=+1609.441629655" lastFinishedPulling="2026-01-23 08:47:07.151020899 +0000 UTC m=+1612.723977267" observedRunningTime="2026-01-23 08:47:07.914178377 +0000 UTC m=+1613.487134745" watchObservedRunningTime="2026-01-23 08:47:07.91834739 +0000 UTC m=+1613.491303758" Jan 23 08:47:12 crc kubenswrapper[4711]: I0123 08:47:12.462791 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9mfjg" Jan 23 08:47:12 crc kubenswrapper[4711]: I0123 08:47:12.463158 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9mfjg" Jan 23 08:47:12 crc kubenswrapper[4711]: I0123 08:47:12.506710 4711 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9mfjg" Jan 23 08:47:12 crc kubenswrapper[4711]: I0123 08:47:12.978687 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9mfjg" Jan 23 08:47:13 crc kubenswrapper[4711]: I0123 08:47:13.027347 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9mfjg"] Jan 23 08:47:14 crc kubenswrapper[4711]: I0123 08:47:14.954354 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9mfjg" podUID="113006f4-8a8d-4213-ab2a-bbf0abd533f6" containerName="registry-server" containerID="cri-o://2033bff50de35678b4ff83b896ef21e152f903d37a51bf1762e06b7da774fece" gracePeriod=2 Jan 23 08:47:15 crc kubenswrapper[4711]: I0123 08:47:15.365281 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9mfjg" Jan 23 08:47:15 crc kubenswrapper[4711]: I0123 08:47:15.472562 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/113006f4-8a8d-4213-ab2a-bbf0abd533f6-catalog-content\") pod \"113006f4-8a8d-4213-ab2a-bbf0abd533f6\" (UID: \"113006f4-8a8d-4213-ab2a-bbf0abd533f6\") " Jan 23 08:47:15 crc kubenswrapper[4711]: I0123 08:47:15.472690 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z92fk\" (UniqueName: \"kubernetes.io/projected/113006f4-8a8d-4213-ab2a-bbf0abd533f6-kube-api-access-z92fk\") pod \"113006f4-8a8d-4213-ab2a-bbf0abd533f6\" (UID: \"113006f4-8a8d-4213-ab2a-bbf0abd533f6\") " Jan 23 08:47:15 crc kubenswrapper[4711]: I0123 08:47:15.472732 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/113006f4-8a8d-4213-ab2a-bbf0abd533f6-utilities\") pod \"113006f4-8a8d-4213-ab2a-bbf0abd533f6\" (UID: \"113006f4-8a8d-4213-ab2a-bbf0abd533f6\") " Jan 23 08:47:15 crc kubenswrapper[4711]: I0123 08:47:15.474845 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/113006f4-8a8d-4213-ab2a-bbf0abd533f6-utilities" (OuterVolumeSpecName: "utilities") pod "113006f4-8a8d-4213-ab2a-bbf0abd533f6" (UID: "113006f4-8a8d-4213-ab2a-bbf0abd533f6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:47:15 crc kubenswrapper[4711]: I0123 08:47:15.478716 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/113006f4-8a8d-4213-ab2a-bbf0abd533f6-kube-api-access-z92fk" (OuterVolumeSpecName: "kube-api-access-z92fk") pod "113006f4-8a8d-4213-ab2a-bbf0abd533f6" (UID: "113006f4-8a8d-4213-ab2a-bbf0abd533f6"). InnerVolumeSpecName "kube-api-access-z92fk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:47:15 crc kubenswrapper[4711]: I0123 08:47:15.525254 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/113006f4-8a8d-4213-ab2a-bbf0abd533f6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "113006f4-8a8d-4213-ab2a-bbf0abd533f6" (UID: "113006f4-8a8d-4213-ab2a-bbf0abd533f6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:47:15 crc kubenswrapper[4711]: I0123 08:47:15.575255 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/113006f4-8a8d-4213-ab2a-bbf0abd533f6-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 23 08:47:15 crc kubenswrapper[4711]: I0123 08:47:15.575332 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z92fk\" (UniqueName: \"kubernetes.io/projected/113006f4-8a8d-4213-ab2a-bbf0abd533f6-kube-api-access-z92fk\") on node \"crc\" DevicePath \"\"" Jan 23 08:47:15 crc kubenswrapper[4711]: I0123 08:47:15.575349 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/113006f4-8a8d-4213-ab2a-bbf0abd533f6-utilities\") on node \"crc\" DevicePath \"\"" Jan 23 08:47:15 crc kubenswrapper[4711]: I0123 08:47:15.964288 4711 generic.go:334] "Generic (PLEG): container finished" podID="113006f4-8a8d-4213-ab2a-bbf0abd533f6" containerID="2033bff50de35678b4ff83b896ef21e152f903d37a51bf1762e06b7da774fece" exitCode=0 Jan 23 08:47:15 crc kubenswrapper[4711]: I0123 08:47:15.964333 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mfjg" event={"ID":"113006f4-8a8d-4213-ab2a-bbf0abd533f6","Type":"ContainerDied","Data":"2033bff50de35678b4ff83b896ef21e152f903d37a51bf1762e06b7da774fece"} Jan 23 08:47:15 crc kubenswrapper[4711]: I0123 08:47:15.964696 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mfjg" event={"ID":"113006f4-8a8d-4213-ab2a-bbf0abd533f6","Type":"ContainerDied","Data":"26e9505f5389ffdbfe53cf6e154e83c8bc1b219c5f120c4ea00b7de238a9d914"} Jan 23 08:47:15 crc kubenswrapper[4711]: I0123 08:47:15.964719 4711 scope.go:117] "RemoveContainer" containerID="2033bff50de35678b4ff83b896ef21e152f903d37a51bf1762e06b7da774fece" Jan 23 08:47:15 crc kubenswrapper[4711]: I0123 08:47:15.964347 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9mfjg" Jan 23 08:47:15 crc kubenswrapper[4711]: I0123 08:47:15.986158 4711 scope.go:117] "RemoveContainer" containerID="8c91b21a518c72868d138c8cca6328c5d93d097284c80cd8bab2ea94529475d7" Jan 23 08:47:16 crc kubenswrapper[4711]: I0123 08:47:16.009577 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9mfjg"] Jan 23 08:47:16 crc kubenswrapper[4711]: I0123 08:47:16.018481 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9mfjg"] Jan 23 08:47:16 crc kubenswrapper[4711]: I0123 08:47:16.021708 4711 scope.go:117] "RemoveContainer" containerID="87db737c82e7d5326534798dcf3931a588de510255f5ff908408298a16b51e14" Jan 23 08:47:16 crc kubenswrapper[4711]: I0123 08:47:16.055856 4711 scope.go:117] "RemoveContainer" containerID="2033bff50de35678b4ff83b896ef21e152f903d37a51bf1762e06b7da774fece" Jan 23 08:47:16 crc kubenswrapper[4711]: E0123 08:47:16.056548 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2033bff50de35678b4ff83b896ef21e152f903d37a51bf1762e06b7da774fece\": container with ID starting with 2033bff50de35678b4ff83b896ef21e152f903d37a51bf1762e06b7da774fece not found: ID does not exist" containerID="2033bff50de35678b4ff83b896ef21e152f903d37a51bf1762e06b7da774fece" Jan 23 08:47:16 crc kubenswrapper[4711]: I0123 08:47:16.056588 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2033bff50de35678b4ff83b896ef21e152f903d37a51bf1762e06b7da774fece"} err="failed to get container status \"2033bff50de35678b4ff83b896ef21e152f903d37a51bf1762e06b7da774fece\": rpc error: code = NotFound desc = could not find container \"2033bff50de35678b4ff83b896ef21e152f903d37a51bf1762e06b7da774fece\": container with ID starting with 2033bff50de35678b4ff83b896ef21e152f903d37a51bf1762e06b7da774fece not found: ID does not exist" Jan 23 08:47:16 crc kubenswrapper[4711]: I0123 08:47:16.056609 4711 scope.go:117] "RemoveContainer" containerID="8c91b21a518c72868d138c8cca6328c5d93d097284c80cd8bab2ea94529475d7" Jan 23 08:47:16 crc kubenswrapper[4711]: E0123 08:47:16.057022 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c91b21a518c72868d138c8cca6328c5d93d097284c80cd8bab2ea94529475d7\": container with ID starting with 8c91b21a518c72868d138c8cca6328c5d93d097284c80cd8bab2ea94529475d7 not found: ID does not exist" containerID="8c91b21a518c72868d138c8cca6328c5d93d097284c80cd8bab2ea94529475d7" Jan 23 08:47:16 crc kubenswrapper[4711]: I0123 08:47:16.057102 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c91b21a518c72868d138c8cca6328c5d93d097284c80cd8bab2ea94529475d7"} err="failed to get container status \"8c91b21a518c72868d138c8cca6328c5d93d097284c80cd8bab2ea94529475d7\": rpc error: code = NotFound desc = could not find container \"8c91b21a518c72868d138c8cca6328c5d93d097284c80cd8bab2ea94529475d7\": container with ID starting with 8c91b21a518c72868d138c8cca6328c5d93d097284c80cd8bab2ea94529475d7 not found: ID does not exist" Jan 23 08:47:16 crc kubenswrapper[4711]: I0123 08:47:16.057160 4711 scope.go:117] "RemoveContainer" containerID="87db737c82e7d5326534798dcf3931a588de510255f5ff908408298a16b51e14" Jan 23 08:47:16 crc kubenswrapper[4711]: E0123 08:47:16.057751 4711 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"87db737c82e7d5326534798dcf3931a588de510255f5ff908408298a16b51e14\": container with ID starting with 87db737c82e7d5326534798dcf3931a588de510255f5ff908408298a16b51e14 not found: ID does not exist" containerID="87db737c82e7d5326534798dcf3931a588de510255f5ff908408298a16b51e14" Jan 23 08:47:16 crc kubenswrapper[4711]: I0123 08:47:16.057779 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87db737c82e7d5326534798dcf3931a588de510255f5ff908408298a16b51e14"} err="failed to get container status \"87db737c82e7d5326534798dcf3931a588de510255f5ff908408298a16b51e14\": rpc error: code = NotFound desc = could not find container \"87db737c82e7d5326534798dcf3931a588de510255f5ff908408298a16b51e14\": container with ID starting with 87db737c82e7d5326534798dcf3931a588de510255f5ff908408298a16b51e14 not found: ID does not exist" Jan 23 08:47:17 crc kubenswrapper[4711]: I0123 08:47:17.486809 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="113006f4-8a8d-4213-ab2a-bbf0abd533f6" path="/var/lib/kubelet/pods/113006f4-8a8d-4213-ab2a-bbf0abd533f6/volumes" Jan 23 08:47:18 crc kubenswrapper[4711]: I0123 08:47:18.909370 4711 scope.go:117] "RemoveContainer" containerID="a677978ad45e0ced786848ea5f63c6fc496d6cce3270f396b60ee07b19ff10cf" Jan 23 08:47:25 crc kubenswrapper[4711]: I0123 08:47:25.995282 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:47:25 crc kubenswrapper[4711]: I0123 08:47:25.995851 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:47:55 crc kubenswrapper[4711]: I0123 08:47:55.993291 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:47:55 crc kubenswrapper[4711]: I0123 08:47:55.993804 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:48:08 crc kubenswrapper[4711]: I0123 08:48:08.769926 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6"] Jan 23 08:48:08 crc kubenswrapper[4711]: I0123 08:48:08.777892 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-mapping-nr2b6"] Jan 23 08:48:08 crc kubenswrapper[4711]: I0123 08:48:08.793941 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr"] Jan 23 08:48:08 crc kubenswrapper[4711]: I0123 08:48:08.802472 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["nova-kuttl-default/nova-kuttl-cell0-cell-mapping-nljvr"] Jan 23 08:48:08 crc kubenswrapper[4711]: I0123 08:48:08.895001 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:48:08 crc kubenswrapper[4711]: I0123 08:48:08.895252 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podUID="30a3b0ef-6b8e-4f03-9f82-3e139cdc315a" containerName="nova-kuttl-scheduler-scheduler" containerID="cri-o://c9de21b388770bb7ae32b509c1bb5d48c96b36292e6e77b210717e8987ce2af8" gracePeriod=30 Jan 23 08:48:08 crc kubenswrapper[4711]: I0123 08:48:08.906979 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/novaapi9e56-account-delete-x42sj"] Jan 23 08:48:08 crc kubenswrapper[4711]: E0123 08:48:08.907347 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="113006f4-8a8d-4213-ab2a-bbf0abd533f6" containerName="registry-server" Jan 23 08:48:08 crc kubenswrapper[4711]: I0123 08:48:08.907360 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="113006f4-8a8d-4213-ab2a-bbf0abd533f6" containerName="registry-server" Jan 23 08:48:08 crc kubenswrapper[4711]: E0123 08:48:08.907386 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="113006f4-8a8d-4213-ab2a-bbf0abd533f6" containerName="extract-utilities" Jan 23 08:48:08 crc kubenswrapper[4711]: I0123 08:48:08.907392 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="113006f4-8a8d-4213-ab2a-bbf0abd533f6" containerName="extract-utilities" Jan 23 08:48:08 crc kubenswrapper[4711]: E0123 08:48:08.907401 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="113006f4-8a8d-4213-ab2a-bbf0abd533f6" containerName="extract-content" Jan 23 08:48:08 crc kubenswrapper[4711]: I0123 08:48:08.907407 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="113006f4-8a8d-4213-ab2a-bbf0abd533f6" containerName="extract-content" Jan 23 08:48:08 crc kubenswrapper[4711]: I0123 08:48:08.907577 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="113006f4-8a8d-4213-ab2a-bbf0abd533f6" containerName="registry-server" Jan 23 08:48:08 crc kubenswrapper[4711]: I0123 08:48:08.908158 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novaapi9e56-account-delete-x42sj" Jan 23 08:48:08 crc kubenswrapper[4711]: I0123 08:48:08.914244 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/novaapi9e56-account-delete-x42sj"] Jan 23 08:48:08 crc kubenswrapper[4711]: I0123 08:48:08.965353 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/novacell0dab5-account-delete-4mn2b"] Jan 23 08:48:08 crc kubenswrapper[4711]: I0123 08:48:08.966416 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/novacell0dab5-account-delete-4mn2b" Jan 23 08:48:08 crc kubenswrapper[4711]: I0123 08:48:08.981204 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/novacell0dab5-account-delete-4mn2b"] Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.022322 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2"] Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.035671 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.035963 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" podUID="92ef4f76-c65c-4b02-8cf0-574d68712a48" containerName="nova-kuttl-cell1-conductor-conductor" containerID="cri-o://957b3eac16d7706343ab805453158385e1efb937792970bb856578b6bd429146" gracePeriod=30 Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.055563 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-w8xz2"] Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.073034 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/novacell17ea3-account-delete-4lbbj"] Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.074383 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novacell17ea3-account-delete-4lbbj" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.078590 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5722c11-ffec-4153-9bbd-909824696dee-operator-scripts\") pod \"novacell0dab5-account-delete-4mn2b\" (UID: \"b5722c11-ffec-4153-9bbd-909824696dee\") " pod="nova-kuttl-default/novacell0dab5-account-delete-4mn2b" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.078728 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c99e30b0-10c7-443e-a604-532eb74a00ba-operator-scripts\") pod \"novaapi9e56-account-delete-x42sj\" (UID: \"c99e30b0-10c7-443e-a604-532eb74a00ba\") " pod="nova-kuttl-default/novaapi9e56-account-delete-x42sj" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.078767 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lnk9\" (UniqueName: \"kubernetes.io/projected/c99e30b0-10c7-443e-a604-532eb74a00ba-kube-api-access-5lnk9\") pod \"novaapi9e56-account-delete-x42sj\" (UID: \"c99e30b0-10c7-443e-a604-532eb74a00ba\") " pod="nova-kuttl-default/novaapi9e56-account-delete-x42sj" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.078849 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9d4vr\" (UniqueName: \"kubernetes.io/projected/b5722c11-ffec-4153-9bbd-909824696dee-kube-api-access-9d4vr\") pod \"novacell0dab5-account-delete-4mn2b\" (UID: \"b5722c11-ffec-4153-9bbd-909824696dee\") " pod="nova-kuttl-default/novacell0dab5-account-delete-4mn2b" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.087157 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/novacell17ea3-account-delete-4lbbj"] Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 
08:48:09.097758 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"] Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.098031 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" podUID="2b9edb96-ed68-4de0-85a1-e40bd22b63b1" containerName="nova-kuttl-cell1-novncproxy-novncproxy" containerID="cri-o://1b59c5a45bec5d7be1d5919c52c8fb4fa3ee4e5bbf942fc6e799cf8c0f8112e2" gracePeriod=30 Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.105281 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m"] Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.118460 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.118720 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" podUID="59ac78e6-c45e-42a0-b959-d1225d2e7d44" containerName="nova-kuttl-cell0-conductor-conductor" containerID="cri-o://62133fb77de90361a06c3a83ae4713777b1f4f078b2eb9b8b7ebd4965765da9a" gracePeriod=30 Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.130808 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-cl75m"] Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.172709 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.172994 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="96c2ff5d-b1a5-4987-8c97-83550fa5752c" containerName="nova-kuttl-metadata-log" containerID="cri-o://252bb9810181a29a93fc3d4cc247aa119f74257eba9b324725721b9debea0e45" gracePeriod=30 Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.173939 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="96c2ff5d-b1a5-4987-8c97-83550fa5752c" containerName="nova-kuttl-metadata-metadata" containerID="cri-o://6f8d6b2022db695313a3f5e2c387143c26303fd55858daf323b2825815f29f59" gracePeriod=30 Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.179969 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c99e30b0-10c7-443e-a604-532eb74a00ba-operator-scripts\") pod \"novaapi9e56-account-delete-x42sj\" (UID: \"c99e30b0-10c7-443e-a604-532eb74a00ba\") " pod="nova-kuttl-default/novaapi9e56-account-delete-x42sj" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.180027 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lnk9\" (UniqueName: \"kubernetes.io/projected/c99e30b0-10c7-443e-a604-532eb74a00ba-kube-api-access-5lnk9\") pod \"novaapi9e56-account-delete-x42sj\" (UID: \"c99e30b0-10c7-443e-a604-532eb74a00ba\") " pod="nova-kuttl-default/novaapi9e56-account-delete-x42sj" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.180062 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de9066b4-0026-4604-a66b-f53176cb219c-operator-scripts\") pod \"novacell17ea3-account-delete-4lbbj\" (UID: 
\"de9066b4-0026-4604-a66b-f53176cb219c\") " pod="nova-kuttl-default/novacell17ea3-account-delete-4lbbj" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.180144 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9d4vr\" (UniqueName: \"kubernetes.io/projected/b5722c11-ffec-4153-9bbd-909824696dee-kube-api-access-9d4vr\") pod \"novacell0dab5-account-delete-4mn2b\" (UID: \"b5722c11-ffec-4153-9bbd-909824696dee\") " pod="nova-kuttl-default/novacell0dab5-account-delete-4mn2b" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.180181 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5722c11-ffec-4153-9bbd-909824696dee-operator-scripts\") pod \"novacell0dab5-account-delete-4mn2b\" (UID: \"b5722c11-ffec-4153-9bbd-909824696dee\") " pod="nova-kuttl-default/novacell0dab5-account-delete-4mn2b" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.180213 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chqtc\" (UniqueName: \"kubernetes.io/projected/de9066b4-0026-4604-a66b-f53176cb219c-kube-api-access-chqtc\") pod \"novacell17ea3-account-delete-4lbbj\" (UID: \"de9066b4-0026-4604-a66b-f53176cb219c\") " pod="nova-kuttl-default/novacell17ea3-account-delete-4lbbj" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.181233 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c99e30b0-10c7-443e-a604-532eb74a00ba-operator-scripts\") pod \"novaapi9e56-account-delete-x42sj\" (UID: \"c99e30b0-10c7-443e-a604-532eb74a00ba\") " pod="nova-kuttl-default/novaapi9e56-account-delete-x42sj" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.181886 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5722c11-ffec-4153-9bbd-909824696dee-operator-scripts\") pod \"novacell0dab5-account-delete-4mn2b\" (UID: \"b5722c11-ffec-4153-9bbd-909824696dee\") " pod="nova-kuttl-default/novacell0dab5-account-delete-4mn2b" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.189460 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.192762 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="4d962003-fa65-47c9-8ddf-78bfd9e0f514" containerName="nova-kuttl-api-log" containerID="cri-o://cc9487395b5d4ff32ecb1db5568739c385de8782ea1d5ac1e08a98c154ad4963" gracePeriod=30 Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.192799 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="4d962003-fa65-47c9-8ddf-78bfd9e0f514" containerName="nova-kuttl-api-api" containerID="cri-o://03caf64a295062133147f3bb668b62f849d1cc217356ec0a23d574c2771f725d" gracePeriod=30 Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.222982 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lnk9\" (UniqueName: \"kubernetes.io/projected/c99e30b0-10c7-443e-a604-532eb74a00ba-kube-api-access-5lnk9\") pod \"novaapi9e56-account-delete-x42sj\" (UID: \"c99e30b0-10c7-443e-a604-532eb74a00ba\") " pod="nova-kuttl-default/novaapi9e56-account-delete-x42sj" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.228956 
4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novaapi9e56-account-delete-x42sj" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.241778 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9d4vr\" (UniqueName: \"kubernetes.io/projected/b5722c11-ffec-4153-9bbd-909824696dee-kube-api-access-9d4vr\") pod \"novacell0dab5-account-delete-4mn2b\" (UID: \"b5722c11-ffec-4153-9bbd-909824696dee\") " pod="nova-kuttl-default/novacell0dab5-account-delete-4mn2b" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.281337 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de9066b4-0026-4604-a66b-f53176cb219c-operator-scripts\") pod \"novacell17ea3-account-delete-4lbbj\" (UID: \"de9066b4-0026-4604-a66b-f53176cb219c\") " pod="nova-kuttl-default/novacell17ea3-account-delete-4lbbj" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.281436 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chqtc\" (UniqueName: \"kubernetes.io/projected/de9066b4-0026-4604-a66b-f53176cb219c-kube-api-access-chqtc\") pod \"novacell17ea3-account-delete-4lbbj\" (UID: \"de9066b4-0026-4604-a66b-f53176cb219c\") " pod="nova-kuttl-default/novacell17ea3-account-delete-4lbbj" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.282975 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de9066b4-0026-4604-a66b-f53176cb219c-operator-scripts\") pod \"novacell17ea3-account-delete-4lbbj\" (UID: \"de9066b4-0026-4604-a66b-f53176cb219c\") " pod="nova-kuttl-default/novacell17ea3-account-delete-4lbbj" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.283184 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novacell0dab5-account-delete-4mn2b" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.298942 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chqtc\" (UniqueName: \"kubernetes.io/projected/de9066b4-0026-4604-a66b-f53176cb219c-kube-api-access-chqtc\") pod \"novacell17ea3-account-delete-4lbbj\" (UID: \"de9066b4-0026-4604-a66b-f53176cb219c\") " pod="nova-kuttl-default/novacell17ea3-account-delete-4lbbj" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.409085 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/novacell17ea3-account-delete-4lbbj" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.517160 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41acc8d8-39b4-4f8f-bcf1-8cb5df438be7" path="/var/lib/kubelet/pods/41acc8d8-39b4-4f8f-bcf1-8cb5df438be7/volumes" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.518168 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f49a860-f013-4823-864f-0e09c4d1211c" path="/var/lib/kubelet/pods/7f49a860-f013-4823-864f-0e09c4d1211c/volumes" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.518975 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e6e254a-f600-44ae-8925-6ad10a1220e5" path="/var/lib/kubelet/pods/9e6e254a-f600-44ae-8925-6ad10a1220e5/volumes" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.522202 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d169e211-d958-49c3-b47c-8aaa334f37b8" path="/var/lib/kubelet/pods/d169e211-d958-49c3-b47c-8aaa334f37b8/volumes" Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.743215 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/novaapi9e56-account-delete-x42sj"] Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.836691 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/novacell0dab5-account-delete-4mn2b"] Jan 23 08:48:09 crc kubenswrapper[4711]: I0123 08:48:09.977018 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/novacell17ea3-account-delete-4lbbj"] Jan 23 08:48:10 crc kubenswrapper[4711]: E0123 08:48:10.080145 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="957b3eac16d7706343ab805453158385e1efb937792970bb856578b6bd429146" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:48:10 crc kubenswrapper[4711]: E0123 08:48:10.082213 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="957b3eac16d7706343ab805453158385e1efb937792970bb856578b6bd429146" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:48:10 crc kubenswrapper[4711]: E0123 08:48:10.083309 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="957b3eac16d7706343ab805453158385e1efb937792970bb856578b6bd429146" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:48:10 crc kubenswrapper[4711]: E0123 08:48:10.083366 4711 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" podUID="92ef4f76-c65c-4b02-8cf0-574d68712a48" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:48:10 crc kubenswrapper[4711]: E0123 08:48:10.326587 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c9de21b388770bb7ae32b509c1bb5d48c96b36292e6e77b210717e8987ce2af8" 
cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:48:10 crc kubenswrapper[4711]: E0123 08:48:10.328302 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c9de21b388770bb7ae32b509c1bb5d48c96b36292e6e77b210717e8987ce2af8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:48:10 crc kubenswrapper[4711]: E0123 08:48:10.329490 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c9de21b388770bb7ae32b509c1bb5d48c96b36292e6e77b210717e8987ce2af8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:48:10 crc kubenswrapper[4711]: E0123 08:48:10.329555 4711 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podUID="30a3b0ef-6b8e-4f03-9f82-3e139cdc315a" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.412640 4711 generic.go:334] "Generic (PLEG): container finished" podID="2b9edb96-ed68-4de0-85a1-e40bd22b63b1" containerID="1b59c5a45bec5d7be1d5919c52c8fb4fa3ee4e5bbf942fc6e799cf8c0f8112e2" exitCode=0 Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.412736 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" event={"ID":"2b9edb96-ed68-4de0-85a1-e40bd22b63b1","Type":"ContainerDied","Data":"1b59c5a45bec5d7be1d5919c52c8fb4fa3ee4e5bbf942fc6e799cf8c0f8112e2"} Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.412791 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" event={"ID":"2b9edb96-ed68-4de0-85a1-e40bd22b63b1","Type":"ContainerDied","Data":"438185acd24559f07962f610b18cb0ef57659ecf70295317aa623416f63f601c"} Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.412806 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="438185acd24559f07962f610b18cb0ef57659ecf70295317aa623416f63f601c" Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.415285 4711 generic.go:334] "Generic (PLEG): container finished" podID="96c2ff5d-b1a5-4987-8c97-83550fa5752c" containerID="252bb9810181a29a93fc3d4cc247aa119f74257eba9b324725721b9debea0e45" exitCode=143 Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.415364 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"96c2ff5d-b1a5-4987-8c97-83550fa5752c","Type":"ContainerDied","Data":"252bb9810181a29a93fc3d4cc247aa119f74257eba9b324725721b9debea0e45"} Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.417190 4711 generic.go:334] "Generic (PLEG): container finished" podID="b5722c11-ffec-4153-9bbd-909824696dee" containerID="6a93b5ad381ef5d0dcca27897e98d07b366a221c0a7579d7b8fc3b33527518af" exitCode=0 Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.417272 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novacell0dab5-account-delete-4mn2b" event={"ID":"b5722c11-ffec-4153-9bbd-909824696dee","Type":"ContainerDied","Data":"6a93b5ad381ef5d0dcca27897e98d07b366a221c0a7579d7b8fc3b33527518af"} Jan 23 08:48:10 crc kubenswrapper[4711]: 
I0123 08:48:10.417300 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novacell0dab5-account-delete-4mn2b" event={"ID":"b5722c11-ffec-4153-9bbd-909824696dee","Type":"ContainerStarted","Data":"fec19ee0db6706ab692dad0c884446a49aab24c8e2c9a9e3db9697adadb21559"} Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.418663 4711 generic.go:334] "Generic (PLEG): container finished" podID="de9066b4-0026-4604-a66b-f53176cb219c" containerID="c37c6b568fa769199921cbfa931e492461b68d3680db861b37bf394111ef3576" exitCode=0 Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.418731 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novacell17ea3-account-delete-4lbbj" event={"ID":"de9066b4-0026-4604-a66b-f53176cb219c","Type":"ContainerDied","Data":"c37c6b568fa769199921cbfa931e492461b68d3680db861b37bf394111ef3576"} Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.418752 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novacell17ea3-account-delete-4lbbj" event={"ID":"de9066b4-0026-4604-a66b-f53176cb219c","Type":"ContainerStarted","Data":"d6ffefa7ac975efed356410c6fa1a9aa19ef210ee1d5d83004b30697ab679300"} Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.420518 4711 generic.go:334] "Generic (PLEG): container finished" podID="c99e30b0-10c7-443e-a604-532eb74a00ba" containerID="d5cc5ce4207b3c97d647aa53dbb8b465a7f3afd37fac958d4c49c3520124600b" exitCode=0 Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.420573 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novaapi9e56-account-delete-x42sj" event={"ID":"c99e30b0-10c7-443e-a604-532eb74a00ba","Type":"ContainerDied","Data":"d5cc5ce4207b3c97d647aa53dbb8b465a7f3afd37fac958d4c49c3520124600b"} Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.420593 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novaapi9e56-account-delete-x42sj" event={"ID":"c99e30b0-10c7-443e-a604-532eb74a00ba","Type":"ContainerStarted","Data":"acc1133998eb90a5a0e9cde67c4709c394eae379702e62a507f6e965a44b89b5"} Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.423132 4711 generic.go:334] "Generic (PLEG): container finished" podID="4d962003-fa65-47c9-8ddf-78bfd9e0f514" containerID="cc9487395b5d4ff32ecb1db5568739c385de8782ea1d5ac1e08a98c154ad4963" exitCode=143 Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.423163 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"4d962003-fa65-47c9-8ddf-78bfd9e0f514","Type":"ContainerDied","Data":"cc9487395b5d4ff32ecb1db5568739c385de8782ea1d5ac1e08a98c154ad4963"} Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.459639 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.619248 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98wk2\" (UniqueName: \"kubernetes.io/projected/2b9edb96-ed68-4de0-85a1-e40bd22b63b1-kube-api-access-98wk2\") pod \"2b9edb96-ed68-4de0-85a1-e40bd22b63b1\" (UID: \"2b9edb96-ed68-4de0-85a1-e40bd22b63b1\") " Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.619367 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b9edb96-ed68-4de0-85a1-e40bd22b63b1-config-data\") pod \"2b9edb96-ed68-4de0-85a1-e40bd22b63b1\" (UID: \"2b9edb96-ed68-4de0-85a1-e40bd22b63b1\") " Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.626324 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b9edb96-ed68-4de0-85a1-e40bd22b63b1-kube-api-access-98wk2" (OuterVolumeSpecName: "kube-api-access-98wk2") pod "2b9edb96-ed68-4de0-85a1-e40bd22b63b1" (UID: "2b9edb96-ed68-4de0-85a1-e40bd22b63b1"). InnerVolumeSpecName "kube-api-access-98wk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.644873 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b9edb96-ed68-4de0-85a1-e40bd22b63b1-config-data" (OuterVolumeSpecName: "config-data") pod "2b9edb96-ed68-4de0-85a1-e40bd22b63b1" (UID: "2b9edb96-ed68-4de0-85a1-e40bd22b63b1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.721296 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98wk2\" (UniqueName: \"kubernetes.io/projected/2b9edb96-ed68-4de0-85a1-e40bd22b63b1-kube-api-access-98wk2\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:10 crc kubenswrapper[4711]: I0123 08:48:10.721338 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b9edb96-ed68-4de0-85a1-e40bd22b63b1-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:11 crc kubenswrapper[4711]: I0123 08:48:11.430147 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:48:11 crc kubenswrapper[4711]: I0123 08:48:11.469718 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"] Jan 23 08:48:11 crc kubenswrapper[4711]: I0123 08:48:11.486932 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"] Jan 23 08:48:11 crc kubenswrapper[4711]: I0123 08:48:11.872252 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novaapi9e56-account-delete-x42sj" Jan 23 08:48:11 crc kubenswrapper[4711]: I0123 08:48:11.881067 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novacell17ea3-account-delete-4lbbj" Jan 23 08:48:11 crc kubenswrapper[4711]: I0123 08:48:11.889358 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/novacell0dab5-account-delete-4mn2b" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.045477 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9d4vr\" (UniqueName: \"kubernetes.io/projected/b5722c11-ffec-4153-9bbd-909824696dee-kube-api-access-9d4vr\") pod \"b5722c11-ffec-4153-9bbd-909824696dee\" (UID: \"b5722c11-ffec-4153-9bbd-909824696dee\") " Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.045612 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c99e30b0-10c7-443e-a604-532eb74a00ba-operator-scripts\") pod \"c99e30b0-10c7-443e-a604-532eb74a00ba\" (UID: \"c99e30b0-10c7-443e-a604-532eb74a00ba\") " Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.045685 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5722c11-ffec-4153-9bbd-909824696dee-operator-scripts\") pod \"b5722c11-ffec-4153-9bbd-909824696dee\" (UID: \"b5722c11-ffec-4153-9bbd-909824696dee\") " Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.045709 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de9066b4-0026-4604-a66b-f53176cb219c-operator-scripts\") pod \"de9066b4-0026-4604-a66b-f53176cb219c\" (UID: \"de9066b4-0026-4604-a66b-f53176cb219c\") " Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.045741 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lnk9\" (UniqueName: \"kubernetes.io/projected/c99e30b0-10c7-443e-a604-532eb74a00ba-kube-api-access-5lnk9\") pod \"c99e30b0-10c7-443e-a604-532eb74a00ba\" (UID: \"c99e30b0-10c7-443e-a604-532eb74a00ba\") " Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.045831 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chqtc\" (UniqueName: \"kubernetes.io/projected/de9066b4-0026-4604-a66b-f53176cb219c-kube-api-access-chqtc\") pod \"de9066b4-0026-4604-a66b-f53176cb219c\" (UID: \"de9066b4-0026-4604-a66b-f53176cb219c\") " Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.046529 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de9066b4-0026-4604-a66b-f53176cb219c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "de9066b4-0026-4604-a66b-f53176cb219c" (UID: "de9066b4-0026-4604-a66b-f53176cb219c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.046543 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b5722c11-ffec-4153-9bbd-909824696dee-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b5722c11-ffec-4153-9bbd-909824696dee" (UID: "b5722c11-ffec-4153-9bbd-909824696dee"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.046623 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c99e30b0-10c7-443e-a604-532eb74a00ba-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c99e30b0-10c7-443e-a604-532eb74a00ba" (UID: "c99e30b0-10c7-443e-a604-532eb74a00ba"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.051342 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de9066b4-0026-4604-a66b-f53176cb219c-kube-api-access-chqtc" (OuterVolumeSpecName: "kube-api-access-chqtc") pod "de9066b4-0026-4604-a66b-f53176cb219c" (UID: "de9066b4-0026-4604-a66b-f53176cb219c"). InnerVolumeSpecName "kube-api-access-chqtc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.051394 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c99e30b0-10c7-443e-a604-532eb74a00ba-kube-api-access-5lnk9" (OuterVolumeSpecName: "kube-api-access-5lnk9") pod "c99e30b0-10c7-443e-a604-532eb74a00ba" (UID: "c99e30b0-10c7-443e-a604-532eb74a00ba"). InnerVolumeSpecName "kube-api-access-5lnk9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.051485 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5722c11-ffec-4153-9bbd-909824696dee-kube-api-access-9d4vr" (OuterVolumeSpecName: "kube-api-access-9d4vr") pod "b5722c11-ffec-4153-9bbd-909824696dee" (UID: "b5722c11-ffec-4153-9bbd-909824696dee"). InnerVolumeSpecName "kube-api-access-9d4vr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.147846 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9d4vr\" (UniqueName: \"kubernetes.io/projected/b5722c11-ffec-4153-9bbd-909824696dee-kube-api-access-9d4vr\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.147887 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c99e30b0-10c7-443e-a604-532eb74a00ba-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.147900 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5722c11-ffec-4153-9bbd-909824696dee-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.147913 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de9066b4-0026-4604-a66b-f53176cb219c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.147925 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lnk9\" (UniqueName: \"kubernetes.io/projected/c99e30b0-10c7-443e-a604-532eb74a00ba-kube-api-access-5lnk9\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.147937 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chqtc\" (UniqueName: \"kubernetes.io/projected/de9066b4-0026-4604-a66b-f53176cb219c-kube-api-access-chqtc\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.305293 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="96c2ff5d-b1a5-4987-8c97-83550fa5752c" containerName="nova-kuttl-metadata-log" probeResult="failure" output="Get \"http://10.217.0.144:8775/\": read tcp 10.217.0.2:32886->10.217.0.144:8775: read: connection reset by peer" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 
08:48:12.305296 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="96c2ff5d-b1a5-4987-8c97-83550fa5752c" containerName="nova-kuttl-metadata-metadata" probeResult="failure" output="Get \"http://10.217.0.144:8775/\": read tcp 10.217.0.2:32900->10.217.0.144:8775: read: connection reset by peer" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.353019 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="4d962003-fa65-47c9-8ddf-78bfd9e0f514" containerName="nova-kuttl-api-log" probeResult="failure" output="Get \"http://10.217.0.145:8774/\": read tcp 10.217.0.2:54008->10.217.0.145:8774: read: connection reset by peer" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.353824 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="4d962003-fa65-47c9-8ddf-78bfd9e0f514" containerName="nova-kuttl-api-api" probeResult="failure" output="Get \"http://10.217.0.145:8774/\": read tcp 10.217.0.2:53998->10.217.0.145:8774: read: connection reset by peer" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.448952 4711 generic.go:334] "Generic (PLEG): container finished" podID="4d962003-fa65-47c9-8ddf-78bfd9e0f514" containerID="03caf64a295062133147f3bb668b62f849d1cc217356ec0a23d574c2771f725d" exitCode=0 Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.449024 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"4d962003-fa65-47c9-8ddf-78bfd9e0f514","Type":"ContainerDied","Data":"03caf64a295062133147f3bb668b62f849d1cc217356ec0a23d574c2771f725d"} Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.453950 4711 generic.go:334] "Generic (PLEG): container finished" podID="96c2ff5d-b1a5-4987-8c97-83550fa5752c" containerID="6f8d6b2022db695313a3f5e2c387143c26303fd55858daf323b2825815f29f59" exitCode=0 Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.454037 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"96c2ff5d-b1a5-4987-8c97-83550fa5752c","Type":"ContainerDied","Data":"6f8d6b2022db695313a3f5e2c387143c26303fd55858daf323b2825815f29f59"} Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.457146 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novacell0dab5-account-delete-4mn2b" event={"ID":"b5722c11-ffec-4153-9bbd-909824696dee","Type":"ContainerDied","Data":"fec19ee0db6706ab692dad0c884446a49aab24c8e2c9a9e3db9697adadb21559"} Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.457203 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fec19ee0db6706ab692dad0c884446a49aab24c8e2c9a9e3db9697adadb21559" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.457297 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novacell0dab5-account-delete-4mn2b" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.460299 4711 util.go:48] "No ready sandbox for pod can be found. 
Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.460299 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novacell17ea3-account-delete-4lbbj"
Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.460293 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novacell17ea3-account-delete-4lbbj" event={"ID":"de9066b4-0026-4604-a66b-f53176cb219c","Type":"ContainerDied","Data":"d6ffefa7ac975efed356410c6fa1a9aa19ef210ee1d5d83004b30697ab679300"}
Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.460353 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6ffefa7ac975efed356410c6fa1a9aa19ef210ee1d5d83004b30697ab679300"
Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.491750 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novaapi9e56-account-delete-x42sj" event={"ID":"c99e30b0-10c7-443e-a604-532eb74a00ba","Type":"ContainerDied","Data":"acc1133998eb90a5a0e9cde67c4709c394eae379702e62a507f6e965a44b89b5"}
Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.491790 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="acc1133998eb90a5a0e9cde67c4709c394eae379702e62a507f6e965a44b89b5"
Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.491844 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novaapi9e56-account-delete-x42sj"
Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.685425 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.727749 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.857743 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lh6j\" (UniqueName: \"kubernetes.io/projected/96c2ff5d-b1a5-4987-8c97-83550fa5752c-kube-api-access-6lh6j\") pod \"96c2ff5d-b1a5-4987-8c97-83550fa5752c\" (UID: \"96c2ff5d-b1a5-4987-8c97-83550fa5752c\") "
Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.857848 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96c2ff5d-b1a5-4987-8c97-83550fa5752c-config-data\") pod \"96c2ff5d-b1a5-4987-8c97-83550fa5752c\" (UID: \"96c2ff5d-b1a5-4987-8c97-83550fa5752c\") "
Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.857877 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96c2ff5d-b1a5-4987-8c97-83550fa5752c-logs\") pod \"96c2ff5d-b1a5-4987-8c97-83550fa5752c\" (UID: \"96c2ff5d-b1a5-4987-8c97-83550fa5752c\") "
Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.857910 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55nnb\" (UniqueName: \"kubernetes.io/projected/4d962003-fa65-47c9-8ddf-78bfd9e0f514-kube-api-access-55nnb\") pod \"4d962003-fa65-47c9-8ddf-78bfd9e0f514\" (UID: \"4d962003-fa65-47c9-8ddf-78bfd9e0f514\") "
Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.857950 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d962003-fa65-47c9-8ddf-78bfd9e0f514-config-data\") pod \"4d962003-fa65-47c9-8ddf-78bfd9e0f514\" (UID: \"4d962003-fa65-47c9-8ddf-78bfd9e0f514\") "
Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.858038 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d962003-fa65-47c9-8ddf-78bfd9e0f514-logs\") pod \"4d962003-fa65-47c9-8ddf-78bfd9e0f514\" (UID: \"4d962003-fa65-47c9-8ddf-78bfd9e0f514\") "
Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.858642 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96c2ff5d-b1a5-4987-8c97-83550fa5752c-logs" (OuterVolumeSpecName: "logs") pod "96c2ff5d-b1a5-4987-8c97-83550fa5752c" (UID: "96c2ff5d-b1a5-4987-8c97-83550fa5752c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.858996 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d962003-fa65-47c9-8ddf-78bfd9e0f514-logs" (OuterVolumeSpecName: "logs") pod "4d962003-fa65-47c9-8ddf-78bfd9e0f514" (UID: "4d962003-fa65-47c9-8ddf-78bfd9e0f514"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.862812 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d962003-fa65-47c9-8ddf-78bfd9e0f514-kube-api-access-55nnb" (OuterVolumeSpecName: "kube-api-access-55nnb") pod "4d962003-fa65-47c9-8ddf-78bfd9e0f514" (UID: "4d962003-fa65-47c9-8ddf-78bfd9e0f514"). InnerVolumeSpecName "kube-api-access-55nnb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.863632 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96c2ff5d-b1a5-4987-8c97-83550fa5752c-kube-api-access-6lh6j" (OuterVolumeSpecName: "kube-api-access-6lh6j") pod "96c2ff5d-b1a5-4987-8c97-83550fa5752c" (UID: "96c2ff5d-b1a5-4987-8c97-83550fa5752c"). InnerVolumeSpecName "kube-api-access-6lh6j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.882266 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96c2ff5d-b1a5-4987-8c97-83550fa5752c-config-data" (OuterVolumeSpecName: "config-data") pod "96c2ff5d-b1a5-4987-8c97-83550fa5752c" (UID: "96c2ff5d-b1a5-4987-8c97-83550fa5752c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.960324 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lh6j\" (UniqueName: \"kubernetes.io/projected/96c2ff5d-b1a5-4987-8c97-83550fa5752c-kube-api-access-6lh6j\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.960352 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96c2ff5d-b1a5-4987-8c97-83550fa5752c-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.960362 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96c2ff5d-b1a5-4987-8c97-83550fa5752c-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.960370 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55nnb\" (UniqueName: \"kubernetes.io/projected/4d962003-fa65-47c9-8ddf-78bfd9e0f514-kube-api-access-55nnb\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.960378 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d962003-fa65-47c9-8ddf-78bfd9e0f514-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:12 crc kubenswrapper[4711]: I0123 08:48:12.960386 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d962003-fa65-47c9-8ddf-78bfd9e0f514-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.223114 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.365946 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g46wf\" (UniqueName: \"kubernetes.io/projected/59ac78e6-c45e-42a0-b959-d1225d2e7d44-kube-api-access-g46wf\") pod \"59ac78e6-c45e-42a0-b959-d1225d2e7d44\" (UID: \"59ac78e6-c45e-42a0-b959-d1225d2e7d44\") " Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.366060 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59ac78e6-c45e-42a0-b959-d1225d2e7d44-config-data\") pod \"59ac78e6-c45e-42a0-b959-d1225d2e7d44\" (UID: \"59ac78e6-c45e-42a0-b959-d1225d2e7d44\") " Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.368957 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59ac78e6-c45e-42a0-b959-d1225d2e7d44-kube-api-access-g46wf" (OuterVolumeSpecName: "kube-api-access-g46wf") pod "59ac78e6-c45e-42a0-b959-d1225d2e7d44" (UID: "59ac78e6-c45e-42a0-b959-d1225d2e7d44"). InnerVolumeSpecName "kube-api-access-g46wf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.386856 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59ac78e6-c45e-42a0-b959-d1225d2e7d44-config-data" (OuterVolumeSpecName: "config-data") pod "59ac78e6-c45e-42a0-b959-d1225d2e7d44" (UID: "59ac78e6-c45e-42a0-b959-d1225d2e7d44"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.467610 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g46wf\" (UniqueName: \"kubernetes.io/projected/59ac78e6-c45e-42a0-b959-d1225d2e7d44-kube-api-access-g46wf\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.467648 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59ac78e6-c45e-42a0-b959-d1225d2e7d44-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.482524 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b9edb96-ed68-4de0-85a1-e40bd22b63b1" path="/var/lib/kubelet/pods/2b9edb96-ed68-4de0-85a1-e40bd22b63b1/volumes" Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.502603 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"96c2ff5d-b1a5-4987-8c97-83550fa5752c","Type":"ContainerDied","Data":"2f403ec9eb5187700c2c53d29130697baf1d98ff62425b1d7e3022a3dfc0d14e"} Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.502727 4711 scope.go:117] "RemoveContainer" containerID="6f8d6b2022db695313a3f5e2c387143c26303fd55858daf323b2825815f29f59" Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.503000 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.507361 4711 generic.go:334] "Generic (PLEG): container finished" podID="59ac78e6-c45e-42a0-b959-d1225d2e7d44" containerID="62133fb77de90361a06c3a83ae4713777b1f4f078b2eb9b8b7ebd4965765da9a" exitCode=0 Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.507455 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.507485 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" event={"ID":"59ac78e6-c45e-42a0-b959-d1225d2e7d44","Type":"ContainerDied","Data":"62133fb77de90361a06c3a83ae4713777b1f4f078b2eb9b8b7ebd4965765da9a"} Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.508126 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" event={"ID":"59ac78e6-c45e-42a0-b959-d1225d2e7d44","Type":"ContainerDied","Data":"49632a46baaa3484224380653ba747b58fb6a0074a055e534c8478c6d5342037"} Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.518084 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"4d962003-fa65-47c9-8ddf-78bfd9e0f514","Type":"ContainerDied","Data":"6eb27513b1e7486d36b9a8d6d11b6dd76c6e41d81d512e7f645f6a18b0a12d3c"} Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.518117 4711 util.go:48] "No ready sandbox for pod can be found. 
Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.518117 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.533469 4711 scope.go:117] "RemoveContainer" containerID="252bb9810181a29a93fc3d4cc247aa119f74257eba9b324725721b9debea0e45"
Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.541244 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"]
Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.549295 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"]
Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.559258 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"]
Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.565081 4711 scope.go:117] "RemoveContainer" containerID="62133fb77de90361a06c3a83ae4713777b1f4f078b2eb9b8b7ebd4965765da9a"
Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.566717 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"]
Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.573987 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"]
Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.581227 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"]
Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.583828 4711 scope.go:117] "RemoveContainer" containerID="62133fb77de90361a06c3a83ae4713777b1f4f078b2eb9b8b7ebd4965765da9a"
Jan 23 08:48:13 crc kubenswrapper[4711]: E0123 08:48:13.584265 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62133fb77de90361a06c3a83ae4713777b1f4f078b2eb9b8b7ebd4965765da9a\": container with ID starting with 62133fb77de90361a06c3a83ae4713777b1f4f078b2eb9b8b7ebd4965765da9a not found: ID does not exist" containerID="62133fb77de90361a06c3a83ae4713777b1f4f078b2eb9b8b7ebd4965765da9a"
Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.584304 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62133fb77de90361a06c3a83ae4713777b1f4f078b2eb9b8b7ebd4965765da9a"} err="failed to get container status \"62133fb77de90361a06c3a83ae4713777b1f4f078b2eb9b8b7ebd4965765da9a\": rpc error: code = NotFound desc = could not find container \"62133fb77de90361a06c3a83ae4713777b1f4f078b2eb9b8b7ebd4965765da9a\": container with ID starting with 62133fb77de90361a06c3a83ae4713777b1f4f078b2eb9b8b7ebd4965765da9a not found: ID does not exist"
Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.584327 4711 scope.go:117] "RemoveContainer" containerID="03caf64a295062133147f3bb668b62f849d1cc217356ec0a23d574c2771f725d"
Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.611661 4711 scope.go:117] "RemoveContainer" containerID="cc9487395b5d4ff32ecb1db5568739c385de8782ea1d5ac1e08a98c154ad4963"
Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.914075 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-api-db-create-v84r6"]
Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.924172 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-api-db-create-v84r6"]
Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.937796 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-api-9e56-account-create-update-xfj2b"]
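(Aside: the E-level "ContainerStatus from runtime service failed" / "DeleteContainer returned error" pair above is a benign race: the kubelet asks CRI-O for the status of a container it has just removed. Telling that apart from a real runtime failure comes down to the gRPC status code. A minimal sketch, assuming err came back from a CRI gRPC call:)

```go
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// isContainerGone reports whether a CRI gRPC error is the benign
// "container with ID ... not found" case seen in the log above,
// as opposed to a real runtime failure.
func isContainerGone(err error) bool {
	return status.Code(err) == codes.NotFound
}

func main() {
	// Synthetic error mirroring the log entry above; in real code err
	// would come from a runtime-service call such as ContainerStatus.
	err := status.Error(codes.NotFound, "could not find container")
	fmt.Println("already removed:", isContainerGone(err))
}
```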
Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.944605 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/novaapi9e56-account-delete-x42sj"]
Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.952082 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-api-9e56-account-create-update-xfj2b"]
Jan 23 08:48:13 crc kubenswrapper[4711]: I0123 08:48:13.960495 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/novaapi9e56-account-delete-x42sj"]
Jan 23 08:48:14 crc kubenswrapper[4711]: I0123 08:48:14.024802 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-cell0-db-create-spdnp"]
Jan 23 08:48:14 crc kubenswrapper[4711]: I0123 08:48:14.033754 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-cell0-db-create-spdnp"]
Jan 23 08:48:14 crc kubenswrapper[4711]: I0123 08:48:14.044387 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/novacell0dab5-account-delete-4mn2b"]
Jan 23 08:48:14 crc kubenswrapper[4711]: I0123 08:48:14.052928 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-cell0-dab5-account-create-update-pvx6d"]
Jan 23 08:48:14 crc kubenswrapper[4711]: I0123 08:48:14.058664 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-cell0-dab5-account-create-update-pvx6d"]
Jan 23 08:48:14 crc kubenswrapper[4711]: I0123 08:48:14.067389 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/novacell0dab5-account-delete-4mn2b"]
Jan 23 08:48:14 crc kubenswrapper[4711]: I0123 08:48:14.113659 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-cell1-db-create-hxfhf"]
Jan 23 08:48:14 crc kubenswrapper[4711]: I0123 08:48:14.120036 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-cell1-db-create-hxfhf"]
Jan 23 08:48:14 crc kubenswrapper[4711]: I0123 08:48:14.133728 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/novacell17ea3-account-delete-4lbbj"]
Jan 23 08:48:14 crc kubenswrapper[4711]: I0123 08:48:14.139071 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-cell1-7ea3-account-create-update-zngqd"]
Jan 23 08:48:14 crc kubenswrapper[4711]: I0123 08:48:14.145231 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/novacell17ea3-account-delete-4lbbj"]
Jan 23 08:48:14 crc kubenswrapper[4711]: I0123 08:48:14.151653 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-cell1-7ea3-account-create-update-zngqd"]
Jan 23 08:48:14 crc kubenswrapper[4711]: I0123 08:48:14.529149 4711 generic.go:334] "Generic (PLEG): container finished" podID="92ef4f76-c65c-4b02-8cf0-574d68712a48" containerID="957b3eac16d7706343ab805453158385e1efb937792970bb856578b6bd429146" exitCode=0
Jan 23 08:48:14 crc kubenswrapper[4711]: I0123 08:48:14.529228 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" event={"ID":"92ef4f76-c65c-4b02-8cf0-574d68712a48","Type":"ContainerDied","Data":"957b3eac16d7706343ab805453158385e1efb937792970bb856578b6bd429146"}
Jan 23 08:48:14 crc kubenswrapper[4711]: I0123 08:48:14.673354 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0"
Jan 23 08:48:14 crc kubenswrapper[4711]: I0123 08:48:14.796020 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r94dp\" (UniqueName: \"kubernetes.io/projected/92ef4f76-c65c-4b02-8cf0-574d68712a48-kube-api-access-r94dp\") pod \"92ef4f76-c65c-4b02-8cf0-574d68712a48\" (UID: \"92ef4f76-c65c-4b02-8cf0-574d68712a48\") "
Jan 23 08:48:14 crc kubenswrapper[4711]: I0123 08:48:14.796116 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92ef4f76-c65c-4b02-8cf0-574d68712a48-config-data\") pod \"92ef4f76-c65c-4b02-8cf0-574d68712a48\" (UID: \"92ef4f76-c65c-4b02-8cf0-574d68712a48\") "
Jan 23 08:48:14 crc kubenswrapper[4711]: I0123 08:48:14.836557 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92ef4f76-c65c-4b02-8cf0-574d68712a48-kube-api-access-r94dp" (OuterVolumeSpecName: "kube-api-access-r94dp") pod "92ef4f76-c65c-4b02-8cf0-574d68712a48" (UID: "92ef4f76-c65c-4b02-8cf0-574d68712a48"). InnerVolumeSpecName "kube-api-access-r94dp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:48:14 crc kubenswrapper[4711]: I0123 08:48:14.859147 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92ef4f76-c65c-4b02-8cf0-574d68712a48-config-data" (OuterVolumeSpecName: "config-data") pod "92ef4f76-c65c-4b02-8cf0-574d68712a48" (UID: "92ef4f76-c65c-4b02-8cf0-574d68712a48"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:48:14 crc kubenswrapper[4711]: I0123 08:48:14.898873 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r94dp\" (UniqueName: \"kubernetes.io/projected/92ef4f76-c65c-4b02-8cf0-574d68712a48-kube-api-access-r94dp\") on node \"crc\" DevicePath \"\""
Jan 23 08:48:14 crc kubenswrapper[4711]: I0123 08:48:14.898932 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92ef4f76-c65c-4b02-8cf0-574d68712a48-config-data\") on node \"crc\" DevicePath \"\""
Jan 23 08:48:15 crc kubenswrapper[4711]: E0123 08:48:15.327858 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c9de21b388770bb7ae32b509c1bb5d48c96b36292e6e77b210717e8987ce2af8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Jan 23 08:48:15 crc kubenswrapper[4711]: E0123 08:48:15.330202 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c9de21b388770bb7ae32b509c1bb5d48c96b36292e6e77b210717e8987ce2af8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Jan 23 08:48:15 crc kubenswrapper[4711]: E0123 08:48:15.332038 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c9de21b388770bb7ae32b509c1bb5d48c96b36292e6e77b210717e8987ce2af8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
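(Aside: the failing readiness probe on nova-kuttl-scheduler-0 is an exec probe; the argv from the ExecSync entries above is /usr/bin/pgrep -r DRST nova-scheduler, and it errors because the container is already stopping, so CRI-O refuses to register the exec PID. Running the same check outside the container is straightforward; a sketch using the exact argv from the log:)

```go
package main

import (
	"fmt"
	"os/exec"
)

func main() {
	// Same argv the kubelet passed to ExecSync in the entries above.
	cmd := exec.Command("/usr/bin/pgrep", "-r", "DRST", "nova-scheduler")
	out, err := cmd.CombinedOutput()
	if err != nil {
		// pgrep exits non-zero when no process matches; the ExecSync calls
		// above failed earlier still, because the container was stopping.
		fmt.Printf("probe command failed: %v (output: %q)\n", err, out)
		return
	}
	fmt.Printf("matching PIDs:\n%s", out)
}
```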
Jan 23 08:48:15 crc kubenswrapper[4711]: E0123 08:48:15.332159 4711 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podUID="30a3b0ef-6b8e-4f03-9f82-3e139cdc315a" containerName="nova-kuttl-scheduler-scheduler"
Jan 23 08:48:15 crc kubenswrapper[4711]: I0123 08:48:15.423433 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" podUID="2b9edb96-ed68-4de0-85a1-e40bd22b63b1" containerName="nova-kuttl-cell1-novncproxy-novncproxy" probeResult="failure" output="Get \"http://10.217.0.135:6080/vnc_lite.html\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 23 08:48:15 crc kubenswrapper[4711]: I0123 08:48:15.484025 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d1e6c02-734b-4427-8817-2a06ff94e5a0" path="/var/lib/kubelet/pods/0d1e6c02-734b-4427-8817-2a06ff94e5a0/volumes"
Jan 23 08:48:15 crc kubenswrapper[4711]: I0123 08:48:15.484772 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d962003-fa65-47c9-8ddf-78bfd9e0f514" path="/var/lib/kubelet/pods/4d962003-fa65-47c9-8ddf-78bfd9e0f514/volumes"
Jan 23 08:48:15 crc kubenswrapper[4711]: I0123 08:48:15.485450 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59ac78e6-c45e-42a0-b959-d1225d2e7d44" path="/var/lib/kubelet/pods/59ac78e6-c45e-42a0-b959-d1225d2e7d44/volumes"
Jan 23 08:48:15 crc kubenswrapper[4711]: I0123 08:48:15.486683 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8" path="/var/lib/kubelet/pods/62c3a2da-aa4f-4cef-8a9e-d3cf3c3e9ca8/volumes"
Jan 23 08:48:15 crc kubenswrapper[4711]: I0123 08:48:15.487378 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75efce06-d8d9-435f-850b-da48f7191d74" path="/var/lib/kubelet/pods/75efce06-d8d9-435f-850b-da48f7191d74/volumes"
Jan 23 08:48:15 crc kubenswrapper[4711]: I0123 08:48:15.488067 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96c2ff5d-b1a5-4987-8c97-83550fa5752c" path="/var/lib/kubelet/pods/96c2ff5d-b1a5-4987-8c97-83550fa5752c/volumes"
Jan 23 08:48:15 crc kubenswrapper[4711]: I0123 08:48:15.489224 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5722c11-ffec-4153-9bbd-909824696dee" path="/var/lib/kubelet/pods/b5722c11-ffec-4153-9bbd-909824696dee/volumes"
Jan 23 08:48:15 crc kubenswrapper[4711]: I0123 08:48:15.489903 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9662d64-6770-40c9-82c4-787eced67f4d" path="/var/lib/kubelet/pods/c9662d64-6770-40c9-82c4-787eced67f4d/volumes"
Jan 23 08:48:15 crc kubenswrapper[4711]: I0123 08:48:15.490501 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c99e30b0-10c7-443e-a604-532eb74a00ba" path="/var/lib/kubelet/pods/c99e30b0-10c7-443e-a604-532eb74a00ba/volumes"
Jan 23 08:48:15 crc kubenswrapper[4711]: I0123 08:48:15.491087 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de9066b4-0026-4604-a66b-f53176cb219c" path="/var/lib/kubelet/pods/de9066b4-0026-4604-a66b-f53176cb219c/volumes"
Jan 23 08:48:15 crc kubenswrapper[4711]: I0123 08:48:15.492145 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e92d7a9b-a014-4599-b179-1aaecf94ef42" path="/var/lib/kubelet/pods/e92d7a9b-a014-4599-b179-1aaecf94ef42/volumes"
dir" podUID="f4382f53-4838-48fd-b802-b0912baaac04" path="/var/lib/kubelet/pods/f4382f53-4838-48fd-b802-b0912baaac04/volumes" Jan 23 08:48:15 crc kubenswrapper[4711]: I0123 08:48:15.540411 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" event={"ID":"92ef4f76-c65c-4b02-8cf0-574d68712a48","Type":"ContainerDied","Data":"97e68bd6e9a0457e313d09930b8cdf4bfa7e711dbaab355751d42de27f07dbe5"} Jan 23 08:48:15 crc kubenswrapper[4711]: I0123 08:48:15.540473 4711 scope.go:117] "RemoveContainer" containerID="957b3eac16d7706343ab805453158385e1efb937792970bb856578b6bd429146" Jan 23 08:48:15 crc kubenswrapper[4711]: I0123 08:48:15.540593 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:48:15 crc kubenswrapper[4711]: I0123 08:48:15.569475 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:48:15 crc kubenswrapper[4711]: I0123 08:48:15.576902 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.719325 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-api-db-create-j56tp"] Jan 23 08:48:16 crc kubenswrapper[4711]: E0123 08:48:16.719781 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96c2ff5d-b1a5-4987-8c97-83550fa5752c" containerName="nova-kuttl-metadata-log" Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.719800 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="96c2ff5d-b1a5-4987-8c97-83550fa5752c" containerName="nova-kuttl-metadata-log" Jan 23 08:48:16 crc kubenswrapper[4711]: E0123 08:48:16.719810 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d962003-fa65-47c9-8ddf-78bfd9e0f514" containerName="nova-kuttl-api-api" Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.719818 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d962003-fa65-47c9-8ddf-78bfd9e0f514" containerName="nova-kuttl-api-api" Jan 23 08:48:16 crc kubenswrapper[4711]: E0123 08:48:16.719831 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b9edb96-ed68-4de0-85a1-e40bd22b63b1" containerName="nova-kuttl-cell1-novncproxy-novncproxy" Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.719839 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b9edb96-ed68-4de0-85a1-e40bd22b63b1" containerName="nova-kuttl-cell1-novncproxy-novncproxy" Jan 23 08:48:16 crc kubenswrapper[4711]: E0123 08:48:16.719858 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5722c11-ffec-4153-9bbd-909824696dee" containerName="mariadb-account-delete" Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.719868 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5722c11-ffec-4153-9bbd-909824696dee" containerName="mariadb-account-delete" Jan 23 08:48:16 crc kubenswrapper[4711]: E0123 08:48:16.719882 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59ac78e6-c45e-42a0-b959-d1225d2e7d44" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.720135 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="59ac78e6-c45e-42a0-b959-d1225d2e7d44" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:48:16 crc kubenswrapper[4711]: E0123 08:48:16.720151 4711 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="96c2ff5d-b1a5-4987-8c97-83550fa5752c" containerName="nova-kuttl-metadata-metadata" Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.720159 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="96c2ff5d-b1a5-4987-8c97-83550fa5752c" containerName="nova-kuttl-metadata-metadata" Jan 23 08:48:16 crc kubenswrapper[4711]: E0123 08:48:16.720178 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d962003-fa65-47c9-8ddf-78bfd9e0f514" containerName="nova-kuttl-api-log" Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.720188 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d962003-fa65-47c9-8ddf-78bfd9e0f514" containerName="nova-kuttl-api-log" Jan 23 08:48:16 crc kubenswrapper[4711]: E0123 08:48:16.720205 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de9066b4-0026-4604-a66b-f53176cb219c" containerName="mariadb-account-delete" Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.720214 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="de9066b4-0026-4604-a66b-f53176cb219c" containerName="mariadb-account-delete" Jan 23 08:48:16 crc kubenswrapper[4711]: E0123 08:48:16.720225 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c99e30b0-10c7-443e-a604-532eb74a00ba" containerName="mariadb-account-delete" Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.720232 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c99e30b0-10c7-443e-a604-532eb74a00ba" containerName="mariadb-account-delete" Jan 23 08:48:16 crc kubenswrapper[4711]: E0123 08:48:16.720251 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92ef4f76-c65c-4b02-8cf0-574d68712a48" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.720260 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="92ef4f76-c65c-4b02-8cf0-574d68712a48" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.720433 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b9edb96-ed68-4de0-85a1-e40bd22b63b1" containerName="nova-kuttl-cell1-novncproxy-novncproxy" Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.720450 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d962003-fa65-47c9-8ddf-78bfd9e0f514" containerName="nova-kuttl-api-api" Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.720462 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="c99e30b0-10c7-443e-a604-532eb74a00ba" containerName="mariadb-account-delete" Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.720477 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="92ef4f76-c65c-4b02-8cf0-574d68712a48" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.720487 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="96c2ff5d-b1a5-4987-8c97-83550fa5752c" containerName="nova-kuttl-metadata-metadata" Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.720508 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="59ac78e6-c45e-42a0-b959-d1225d2e7d44" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.720518 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="96c2ff5d-b1a5-4987-8c97-83550fa5752c" containerName="nova-kuttl-metadata-log" Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.720528 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="de9066b4-0026-4604-a66b-f53176cb219c" containerName="mariadb-account-delete"
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.720539 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5722c11-ffec-4153-9bbd-909824696dee" containerName="mariadb-account-delete"
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.720574 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d962003-fa65-47c9-8ddf-78bfd9e0f514" containerName="nova-kuttl-api-log"
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.721200 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-api-db-create-j56tp"
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.725893 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-api-c035-account-create-update-lb8vg"]
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.727800 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-api-c035-account-create-update-lb8vg"
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.729562 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-api-db-secret"
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.737498 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-api-db-create-j56tp"]
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.745369 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-api-c035-account-create-update-lb8vg"]
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.825313 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-cell0-db-create-rm9fn"]
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.826329 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-db-create-rm9fn"
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.828407 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00a709f3-ebe2-4c67-bf82-13e14b658eb3-operator-scripts\") pod \"nova-api-db-create-j56tp\" (UID: \"00a709f3-ebe2-4c67-bf82-13e14b658eb3\") " pod="nova-kuttl-default/nova-api-db-create-j56tp"
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.828492 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k74n9\" (UniqueName: \"kubernetes.io/projected/00a709f3-ebe2-4c67-bf82-13e14b658eb3-kube-api-access-k74n9\") pod \"nova-api-db-create-j56tp\" (UID: \"00a709f3-ebe2-4c67-bf82-13e14b658eb3\") " pod="nova-kuttl-default/nova-api-db-create-j56tp"
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.828569 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23189694-c571-4400-b89b-084ce0ebc613-operator-scripts\") pod \"nova-api-c035-account-create-update-lb8vg\" (UID: \"23189694-c571-4400-b89b-084ce0ebc613\") " pod="nova-kuttl-default/nova-api-c035-account-create-update-lb8vg"
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.828628 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sr6pw\" (UniqueName: \"kubernetes.io/projected/23189694-c571-4400-b89b-084ce0ebc613-kube-api-access-sr6pw\") pod \"nova-api-c035-account-create-update-lb8vg\" (UID: \"23189694-c571-4400-b89b-084ce0ebc613\") " pod="nova-kuttl-default/nova-api-c035-account-create-update-lb8vg"
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.834874 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell0-db-create-rm9fn"]
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.926293 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-cell1-db-create-vhvsz"]
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.927678 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-db-create-vhvsz"
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.929985 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00a709f3-ebe2-4c67-bf82-13e14b658eb3-operator-scripts\") pod \"nova-api-db-create-j56tp\" (UID: \"00a709f3-ebe2-4c67-bf82-13e14b658eb3\") " pod="nova-kuttl-default/nova-api-db-create-j56tp"
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.930088 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jclbw\" (UniqueName: \"kubernetes.io/projected/a9fa2633-02dc-4fff-9308-d70167fd430e-kube-api-access-jclbw\") pod \"nova-cell0-db-create-rm9fn\" (UID: \"a9fa2633-02dc-4fff-9308-d70167fd430e\") " pod="nova-kuttl-default/nova-cell0-db-create-rm9fn"
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.930119 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k74n9\" (UniqueName: \"kubernetes.io/projected/00a709f3-ebe2-4c67-bf82-13e14b658eb3-kube-api-access-k74n9\") pod \"nova-api-db-create-j56tp\" (UID: \"00a709f3-ebe2-4c67-bf82-13e14b658eb3\") " pod="nova-kuttl-default/nova-api-db-create-j56tp"
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.930178 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23189694-c571-4400-b89b-084ce0ebc613-operator-scripts\") pod \"nova-api-c035-account-create-update-lb8vg\" (UID: \"23189694-c571-4400-b89b-084ce0ebc613\") " pod="nova-kuttl-default/nova-api-c035-account-create-update-lb8vg"
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.930231 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9fa2633-02dc-4fff-9308-d70167fd430e-operator-scripts\") pod \"nova-cell0-db-create-rm9fn\" (UID: \"a9fa2633-02dc-4fff-9308-d70167fd430e\") " pod="nova-kuttl-default/nova-cell0-db-create-rm9fn"
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.930262 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sr6pw\" (UniqueName: \"kubernetes.io/projected/23189694-c571-4400-b89b-084ce0ebc613-kube-api-access-sr6pw\") pod \"nova-api-c035-account-create-update-lb8vg\" (UID: \"23189694-c571-4400-b89b-084ce0ebc613\") " pod="nova-kuttl-default/nova-api-c035-account-create-update-lb8vg"
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.931096 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00a709f3-ebe2-4c67-bf82-13e14b658eb3-operator-scripts\") pod \"nova-api-db-create-j56tp\" (UID: \"00a709f3-ebe2-4c67-bf82-13e14b658eb3\") " pod="nova-kuttl-default/nova-api-db-create-j56tp"
Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.931102 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23189694-c571-4400-b89b-084ce0ebc613-operator-scripts\") pod \"nova-api-c035-account-create-update-lb8vg\" (UID: \"23189694-c571-4400-b89b-084ce0ebc613\") " pod="nova-kuttl-default/nova-api-c035-account-create-update-lb8vg"
pods=["nova-kuttl-default/nova-cell0-1f13-account-create-update-6xgp4"] Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.935563 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-1f13-account-create-update-6xgp4" Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.937397 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-cell0-db-secret" Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.941740 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell1-db-create-vhvsz"] Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.946320 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell0-1f13-account-create-update-6xgp4"] Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.965929 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k74n9\" (UniqueName: \"kubernetes.io/projected/00a709f3-ebe2-4c67-bf82-13e14b658eb3-kube-api-access-k74n9\") pod \"nova-api-db-create-j56tp\" (UID: \"00a709f3-ebe2-4c67-bf82-13e14b658eb3\") " pod="nova-kuttl-default/nova-api-db-create-j56tp" Jan 23 08:48:16 crc kubenswrapper[4711]: I0123 08:48:16.965929 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sr6pw\" (UniqueName: \"kubernetes.io/projected/23189694-c571-4400-b89b-084ce0ebc613-kube-api-access-sr6pw\") pod \"nova-api-c035-account-create-update-lb8vg\" (UID: \"23189694-c571-4400-b89b-084ce0ebc613\") " pod="nova-kuttl-default/nova-api-c035-account-create-update-lb8vg" Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.031358 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f09a9eb4-8481-496b-96d3-dbd5ac4e2172-operator-scripts\") pod \"nova-cell1-db-create-vhvsz\" (UID: \"f09a9eb4-8481-496b-96d3-dbd5ac4e2172\") " pod="nova-kuttl-default/nova-cell1-db-create-vhvsz" Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.031439 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e24aad2a-3579-4da8-b755-6e6edee47096-operator-scripts\") pod \"nova-cell0-1f13-account-create-update-6xgp4\" (UID: \"e24aad2a-3579-4da8-b755-6e6edee47096\") " pod="nova-kuttl-default/nova-cell0-1f13-account-create-update-6xgp4" Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.031472 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jclbw\" (UniqueName: \"kubernetes.io/projected/a9fa2633-02dc-4fff-9308-d70167fd430e-kube-api-access-jclbw\") pod \"nova-cell0-db-create-rm9fn\" (UID: \"a9fa2633-02dc-4fff-9308-d70167fd430e\") " pod="nova-kuttl-default/nova-cell0-db-create-rm9fn" Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.031501 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjgrt\" (UniqueName: \"kubernetes.io/projected/f09a9eb4-8481-496b-96d3-dbd5ac4e2172-kube-api-access-mjgrt\") pod \"nova-cell1-db-create-vhvsz\" (UID: \"f09a9eb4-8481-496b-96d3-dbd5ac4e2172\") " pod="nova-kuttl-default/nova-cell1-db-create-vhvsz" Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.031584 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/a9fa2633-02dc-4fff-9308-d70167fd430e-operator-scripts\") pod \"nova-cell0-db-create-rm9fn\" (UID: \"a9fa2633-02dc-4fff-9308-d70167fd430e\") " pod="nova-kuttl-default/nova-cell0-db-create-rm9fn" Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.031612 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwsp2\" (UniqueName: \"kubernetes.io/projected/e24aad2a-3579-4da8-b755-6e6edee47096-kube-api-access-fwsp2\") pod \"nova-cell0-1f13-account-create-update-6xgp4\" (UID: \"e24aad2a-3579-4da8-b755-6e6edee47096\") " pod="nova-kuttl-default/nova-cell0-1f13-account-create-update-6xgp4" Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.032280 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9fa2633-02dc-4fff-9308-d70167fd430e-operator-scripts\") pod \"nova-cell0-db-create-rm9fn\" (UID: \"a9fa2633-02dc-4fff-9308-d70167fd430e\") " pod="nova-kuttl-default/nova-cell0-db-create-rm9fn" Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.039120 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-api-db-create-j56tp" Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.047157 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-api-c035-account-create-update-lb8vg" Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.048631 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jclbw\" (UniqueName: \"kubernetes.io/projected/a9fa2633-02dc-4fff-9308-d70167fd430e-kube-api-access-jclbw\") pod \"nova-cell0-db-create-rm9fn\" (UID: \"a9fa2633-02dc-4fff-9308-d70167fd430e\") " pod="nova-kuttl-default/nova-cell0-db-create-rm9fn" Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.128808 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-cell1-ae24-account-create-update-2sg59"] Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.132104 4711 util.go:30] "No sandbox for pod can be found. 
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.132104 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-ae24-account-create-update-2sg59"
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.132983 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f09a9eb4-8481-496b-96d3-dbd5ac4e2172-operator-scripts\") pod \"nova-cell1-db-create-vhvsz\" (UID: \"f09a9eb4-8481-496b-96d3-dbd5ac4e2172\") " pod="nova-kuttl-default/nova-cell1-db-create-vhvsz"
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.133024 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e24aad2a-3579-4da8-b755-6e6edee47096-operator-scripts\") pod \"nova-cell0-1f13-account-create-update-6xgp4\" (UID: \"e24aad2a-3579-4da8-b755-6e6edee47096\") " pod="nova-kuttl-default/nova-cell0-1f13-account-create-update-6xgp4"
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.133109 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjgrt\" (UniqueName: \"kubernetes.io/projected/f09a9eb4-8481-496b-96d3-dbd5ac4e2172-kube-api-access-mjgrt\") pod \"nova-cell1-db-create-vhvsz\" (UID: \"f09a9eb4-8481-496b-96d3-dbd5ac4e2172\") " pod="nova-kuttl-default/nova-cell1-db-create-vhvsz"
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.133197 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwsp2\" (UniqueName: \"kubernetes.io/projected/e24aad2a-3579-4da8-b755-6e6edee47096-kube-api-access-fwsp2\") pod \"nova-cell0-1f13-account-create-update-6xgp4\" (UID: \"e24aad2a-3579-4da8-b755-6e6edee47096\") " pod="nova-kuttl-default/nova-cell0-1f13-account-create-update-6xgp4"
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.133943 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f09a9eb4-8481-496b-96d3-dbd5ac4e2172-operator-scripts\") pod \"nova-cell1-db-create-vhvsz\" (UID: \"f09a9eb4-8481-496b-96d3-dbd5ac4e2172\") " pod="nova-kuttl-default/nova-cell1-db-create-vhvsz"
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.134104 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-cell1-db-secret"
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.134218 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e24aad2a-3579-4da8-b755-6e6edee47096-operator-scripts\") pod \"nova-cell0-1f13-account-create-update-6xgp4\" (UID: \"e24aad2a-3579-4da8-b755-6e6edee47096\") " pod="nova-kuttl-default/nova-cell0-1f13-account-create-update-6xgp4"
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.142564 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-db-create-rm9fn"
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.150106 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell1-ae24-account-create-update-2sg59"]
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.177190 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjgrt\" (UniqueName: \"kubernetes.io/projected/f09a9eb4-8481-496b-96d3-dbd5ac4e2172-kube-api-access-mjgrt\") pod \"nova-cell1-db-create-vhvsz\" (UID: \"f09a9eb4-8481-496b-96d3-dbd5ac4e2172\") " pod="nova-kuttl-default/nova-cell1-db-create-vhvsz"
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.189138 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwsp2\" (UniqueName: \"kubernetes.io/projected/e24aad2a-3579-4da8-b755-6e6edee47096-kube-api-access-fwsp2\") pod \"nova-cell0-1f13-account-create-update-6xgp4\" (UID: \"e24aad2a-3579-4da8-b755-6e6edee47096\") " pod="nova-kuttl-default/nova-cell0-1f13-account-create-update-6xgp4"
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.234443 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zssjv\" (UniqueName: \"kubernetes.io/projected/900647f1-76d3-4aef-be40-87ed4482b0c6-kube-api-access-zssjv\") pod \"nova-cell1-ae24-account-create-update-2sg59\" (UID: \"900647f1-76d3-4aef-be40-87ed4482b0c6\") " pod="nova-kuttl-default/nova-cell1-ae24-account-create-update-2sg59"
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.234517 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/900647f1-76d3-4aef-be40-87ed4482b0c6-operator-scripts\") pod \"nova-cell1-ae24-account-create-update-2sg59\" (UID: \"900647f1-76d3-4aef-be40-87ed4482b0c6\") " pod="nova-kuttl-default/nova-cell1-ae24-account-create-update-2sg59"
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.246339 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-db-create-vhvsz"
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.259671 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-1f13-account-create-update-6xgp4"
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.336591 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zssjv\" (UniqueName: \"kubernetes.io/projected/900647f1-76d3-4aef-be40-87ed4482b0c6-kube-api-access-zssjv\") pod \"nova-cell1-ae24-account-create-update-2sg59\" (UID: \"900647f1-76d3-4aef-be40-87ed4482b0c6\") " pod="nova-kuttl-default/nova-cell1-ae24-account-create-update-2sg59"
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.336678 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/900647f1-76d3-4aef-be40-87ed4482b0c6-operator-scripts\") pod \"nova-cell1-ae24-account-create-update-2sg59\" (UID: \"900647f1-76d3-4aef-be40-87ed4482b0c6\") " pod="nova-kuttl-default/nova-cell1-ae24-account-create-update-2sg59"
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.337466 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/900647f1-76d3-4aef-be40-87ed4482b0c6-operator-scripts\") pod \"nova-cell1-ae24-account-create-update-2sg59\" (UID: \"900647f1-76d3-4aef-be40-87ed4482b0c6\") " pod="nova-kuttl-default/nova-cell1-ae24-account-create-update-2sg59"
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.354659 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zssjv\" (UniqueName: \"kubernetes.io/projected/900647f1-76d3-4aef-be40-87ed4482b0c6-kube-api-access-zssjv\") pod \"nova-cell1-ae24-account-create-update-2sg59\" (UID: \"900647f1-76d3-4aef-be40-87ed4482b0c6\") " pod="nova-kuttl-default/nova-cell1-ae24-account-create-update-2sg59"
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.483828 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92ef4f76-c65c-4b02-8cf0-574d68712a48" path="/var/lib/kubelet/pods/92ef4f76-c65c-4b02-8cf0-574d68712a48/volumes"
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.496560 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-ae24-account-create-update-2sg59"
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.573538 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-api-db-create-j56tp"]
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.580129 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-api-c035-account-create-update-lb8vg"]
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.731979 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell0-db-create-rm9fn"]
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.798435 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell0-1f13-account-create-update-6xgp4"]
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.805434 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell1-db-create-vhvsz"]
Jan 23 08:48:17 crc kubenswrapper[4711]: W0123 08:48:17.811654 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf09a9eb4_8481_496b_96d3_dbd5ac4e2172.slice/crio-967ac474d66c3b3b455dc2fb84ae05b95d6a2361cb08d94611f51b3efc4bc2c7 WatchSource:0}: Error finding container 967ac474d66c3b3b455dc2fb84ae05b95d6a2361cb08d94611f51b3efc4bc2c7: Status 404 returned error can't find the container with id 967ac474d66c3b3b455dc2fb84ae05b95d6a2361cb08d94611f51b3efc4bc2c7
Jan 23 08:48:17 crc kubenswrapper[4711]: I0123 08:48:17.958177 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell1-ae24-account-create-update-2sg59"]
Jan 23 08:48:17 crc kubenswrapper[4711]: W0123 08:48:17.965617 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod900647f1_76d3_4aef_be40_87ed4482b0c6.slice/crio-bcb5b72fc3d06a3b686123941e84b4c9355c3149d59d65ab0907122e897d7bcf WatchSource:0}: Error finding container bcb5b72fc3d06a3b686123941e84b4c9355c3149d59d65ab0907122e897d7bcf: Status 404 returned error can't find the container with id bcb5b72fc3d06a3b686123941e84b4c9355c3149d59d65ab0907122e897d7bcf
Jan 23 08:48:18 crc kubenswrapper[4711]: I0123 08:48:18.567563 4711 generic.go:334] "Generic (PLEG): container finished" podID="a9fa2633-02dc-4fff-9308-d70167fd430e" containerID="0bd27b971d233e79b0d00816b739c3be6c8a2406f8152bf429ac3cd7a2e6edca" exitCode=0
Jan 23 08:48:18 crc kubenswrapper[4711]: I0123 08:48:18.567624 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-db-create-rm9fn" event={"ID":"a9fa2633-02dc-4fff-9308-d70167fd430e","Type":"ContainerDied","Data":"0bd27b971d233e79b0d00816b739c3be6c8a2406f8152bf429ac3cd7a2e6edca"}
Jan 23 08:48:18 crc kubenswrapper[4711]: I0123 08:48:18.567897 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-db-create-rm9fn" event={"ID":"a9fa2633-02dc-4fff-9308-d70167fd430e","Type":"ContainerStarted","Data":"f69bee8dfa2fe2fe3fceff6ac7075e5a48c2ffa9a58fd45ab54b5a2268d1694b"}
Jan 23 08:48:18 crc kubenswrapper[4711]: I0123 08:48:18.571003 4711 generic.go:334] "Generic (PLEG): container finished" podID="23189694-c571-4400-b89b-084ce0ebc613" containerID="840ab4013f76229bffb40357995d854bef2a196cc1e80c7ba9737b4a4a23a9c6" exitCode=0
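(Aside: the two manager.go W-level warnings above come from cAdvisor racing container creation: it receives a cgroup watch event for a crio-<id> slice before the container is registered with the runtime. The path in the event is a plain directory under the cgroup filesystem; a sketch that checks whether such a path currently exists, with the relative path copied from the first warning and the /sys/fs/cgroup mount point an assumption about this host:)

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// Cgroup path from the "Failed to process watch event" warning above.
	rel := "kubepods.slice/kubepods-besteffort.slice/" +
		"kubepods-besteffort-podf09a9eb4_8481_496b_96d3_dbd5ac4e2172.slice/" +
		"crio-967ac474d66c3b3b455dc2fb84ae05b95d6a2361cb08d94611f51b3efc4bc2c7"
	path := filepath.Join("/sys/fs/cgroup", rel) // mount point is an assumption
	if _, err := os.Stat(path); err != nil {
		fmt.Println("cgroup not present (matches the 404 race):", err)
		return
	}
	fmt.Println("cgroup exists:", path)
}
```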
pod="nova-kuttl-default/nova-api-c035-account-create-update-lb8vg" event={"ID":"23189694-c571-4400-b89b-084ce0ebc613","Type":"ContainerDied","Data":"840ab4013f76229bffb40357995d854bef2a196cc1e80c7ba9737b4a4a23a9c6"} Jan 23 08:48:18 crc kubenswrapper[4711]: I0123 08:48:18.571086 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-c035-account-create-update-lb8vg" event={"ID":"23189694-c571-4400-b89b-084ce0ebc613","Type":"ContainerStarted","Data":"0cbe240eeeb6ba2913379f0bdaa656391d6d39aecdb1363426b61d89be6fd435"} Jan 23 08:48:18 crc kubenswrapper[4711]: I0123 08:48:18.573492 4711 generic.go:334] "Generic (PLEG): container finished" podID="f09a9eb4-8481-496b-96d3-dbd5ac4e2172" containerID="113b88d1f806ba6aa7c338ee91953eb99c9ea9560b0f4c6bfb3ff472e57d7221" exitCode=0 Jan 23 08:48:18 crc kubenswrapper[4711]: I0123 08:48:18.573643 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-db-create-vhvsz" event={"ID":"f09a9eb4-8481-496b-96d3-dbd5ac4e2172","Type":"ContainerDied","Data":"113b88d1f806ba6aa7c338ee91953eb99c9ea9560b0f4c6bfb3ff472e57d7221"} Jan 23 08:48:18 crc kubenswrapper[4711]: I0123 08:48:18.573769 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-db-create-vhvsz" event={"ID":"f09a9eb4-8481-496b-96d3-dbd5ac4e2172","Type":"ContainerStarted","Data":"967ac474d66c3b3b455dc2fb84ae05b95d6a2361cb08d94611f51b3efc4bc2c7"} Jan 23 08:48:18 crc kubenswrapper[4711]: I0123 08:48:18.578512 4711 generic.go:334] "Generic (PLEG): container finished" podID="00a709f3-ebe2-4c67-bf82-13e14b658eb3" containerID="5672a92c5e490320e6276ce3cfe62f369e52d799ddbfc4498b8560fa46f91832" exitCode=0 Jan 23 08:48:18 crc kubenswrapper[4711]: I0123 08:48:18.578611 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-db-create-j56tp" event={"ID":"00a709f3-ebe2-4c67-bf82-13e14b658eb3","Type":"ContainerDied","Data":"5672a92c5e490320e6276ce3cfe62f369e52d799ddbfc4498b8560fa46f91832"} Jan 23 08:48:18 crc kubenswrapper[4711]: I0123 08:48:18.578643 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-db-create-j56tp" event={"ID":"00a709f3-ebe2-4c67-bf82-13e14b658eb3","Type":"ContainerStarted","Data":"6409cfb432ef650a9370d5f7ebb0d594f9ca9218d644ecb1efb3ad174a12747d"} Jan 23 08:48:18 crc kubenswrapper[4711]: I0123 08:48:18.583072 4711 generic.go:334] "Generic (PLEG): container finished" podID="e24aad2a-3579-4da8-b755-6e6edee47096" containerID="36b86eab09dee22ec9f115bb2e075374a728b6174c014138f80a6c7598a4723a" exitCode=0 Jan 23 08:48:18 crc kubenswrapper[4711]: I0123 08:48:18.583127 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-1f13-account-create-update-6xgp4" event={"ID":"e24aad2a-3579-4da8-b755-6e6edee47096","Type":"ContainerDied","Data":"36b86eab09dee22ec9f115bb2e075374a728b6174c014138f80a6c7598a4723a"} Jan 23 08:48:18 crc kubenswrapper[4711]: I0123 08:48:18.583198 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-1f13-account-create-update-6xgp4" event={"ID":"e24aad2a-3579-4da8-b755-6e6edee47096","Type":"ContainerStarted","Data":"7a5e0ef2f23c261bc5370ea0aa69b61b06473c75f3e1aa4d3655a9029be98898"} Jan 23 08:48:18 crc kubenswrapper[4711]: I0123 08:48:18.585336 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-ae24-account-create-update-2sg59" 
event={"ID":"900647f1-76d3-4aef-be40-87ed4482b0c6","Type":"ContainerStarted","Data":"1c562cec10b1575989444d35985de6eaedbd8ba83a34a7fa517b24c593936536"} Jan 23 08:48:18 crc kubenswrapper[4711]: I0123 08:48:18.585376 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-ae24-account-create-update-2sg59" event={"ID":"900647f1-76d3-4aef-be40-87ed4482b0c6","Type":"ContainerStarted","Data":"bcb5b72fc3d06a3b686123941e84b4c9355c3149d59d65ab0907122e897d7bcf"} Jan 23 08:48:18 crc kubenswrapper[4711]: I0123 08:48:18.658085 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-cell1-ae24-account-create-update-2sg59" podStartSLOduration=1.658048956 podStartE2EDuration="1.658048956s" podCreationTimestamp="2026-01-23 08:48:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:48:18.651412264 +0000 UTC m=+1684.224368632" watchObservedRunningTime="2026-01-23 08:48:18.658048956 +0000 UTC m=+1684.231005324" Jan 23 08:48:19 crc kubenswrapper[4711]: I0123 08:48:19.159768 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:19 crc kubenswrapper[4711]: I0123 08:48:19.271204 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6dwq\" (UniqueName: \"kubernetes.io/projected/30a3b0ef-6b8e-4f03-9f82-3e139cdc315a-kube-api-access-j6dwq\") pod \"30a3b0ef-6b8e-4f03-9f82-3e139cdc315a\" (UID: \"30a3b0ef-6b8e-4f03-9f82-3e139cdc315a\") " Jan 23 08:48:19 crc kubenswrapper[4711]: I0123 08:48:19.271402 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30a3b0ef-6b8e-4f03-9f82-3e139cdc315a-config-data\") pod \"30a3b0ef-6b8e-4f03-9f82-3e139cdc315a\" (UID: \"30a3b0ef-6b8e-4f03-9f82-3e139cdc315a\") " Jan 23 08:48:19 crc kubenswrapper[4711]: I0123 08:48:19.276834 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30a3b0ef-6b8e-4f03-9f82-3e139cdc315a-kube-api-access-j6dwq" (OuterVolumeSpecName: "kube-api-access-j6dwq") pod "30a3b0ef-6b8e-4f03-9f82-3e139cdc315a" (UID: "30a3b0ef-6b8e-4f03-9f82-3e139cdc315a"). InnerVolumeSpecName "kube-api-access-j6dwq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:48:19 crc kubenswrapper[4711]: I0123 08:48:19.296567 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30a3b0ef-6b8e-4f03-9f82-3e139cdc315a-config-data" (OuterVolumeSpecName: "config-data") pod "30a3b0ef-6b8e-4f03-9f82-3e139cdc315a" (UID: "30a3b0ef-6b8e-4f03-9f82-3e139cdc315a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:48:19 crc kubenswrapper[4711]: I0123 08:48:19.373247 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30a3b0ef-6b8e-4f03-9f82-3e139cdc315a-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:19 crc kubenswrapper[4711]: I0123 08:48:19.373282 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6dwq\" (UniqueName: \"kubernetes.io/projected/30a3b0ef-6b8e-4f03-9f82-3e139cdc315a-kube-api-access-j6dwq\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:19 crc kubenswrapper[4711]: I0123 08:48:19.592803 4711 generic.go:334] "Generic (PLEG): container finished" podID="30a3b0ef-6b8e-4f03-9f82-3e139cdc315a" containerID="c9de21b388770bb7ae32b509c1bb5d48c96b36292e6e77b210717e8987ce2af8" exitCode=0 Jan 23 08:48:19 crc kubenswrapper[4711]: I0123 08:48:19.593141 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"30a3b0ef-6b8e-4f03-9f82-3e139cdc315a","Type":"ContainerDied","Data":"c9de21b388770bb7ae32b509c1bb5d48c96b36292e6e77b210717e8987ce2af8"} Jan 23 08:48:19 crc kubenswrapper[4711]: I0123 08:48:19.593220 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"30a3b0ef-6b8e-4f03-9f82-3e139cdc315a","Type":"ContainerDied","Data":"d0bba70eb3300983fd4e21d9fbdf6e0b2ced0e4ecb42b73704bfff13549a0f06"} Jan 23 08:48:19 crc kubenswrapper[4711]: I0123 08:48:19.593305 4711 scope.go:117] "RemoveContainer" containerID="c9de21b388770bb7ae32b509c1bb5d48c96b36292e6e77b210717e8987ce2af8" Jan 23 08:48:19 crc kubenswrapper[4711]: I0123 08:48:19.593486 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:19 crc kubenswrapper[4711]: I0123 08:48:19.601647 4711 generic.go:334] "Generic (PLEG): container finished" podID="900647f1-76d3-4aef-be40-87ed4482b0c6" containerID="1c562cec10b1575989444d35985de6eaedbd8ba83a34a7fa517b24c593936536" exitCode=0 Jan 23 08:48:19 crc kubenswrapper[4711]: I0123 08:48:19.601848 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-ae24-account-create-update-2sg59" event={"ID":"900647f1-76d3-4aef-be40-87ed4482b0c6","Type":"ContainerDied","Data":"1c562cec10b1575989444d35985de6eaedbd8ba83a34a7fa517b24c593936536"} Jan 23 08:48:19 crc kubenswrapper[4711]: I0123 08:48:19.632828 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:48:19 crc kubenswrapper[4711]: I0123 08:48:19.649515 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:48:19 crc kubenswrapper[4711]: I0123 08:48:19.671697 4711 scope.go:117] "RemoveContainer" containerID="c9de21b388770bb7ae32b509c1bb5d48c96b36292e6e77b210717e8987ce2af8" Jan 23 08:48:19 crc kubenswrapper[4711]: E0123 08:48:19.674696 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9de21b388770bb7ae32b509c1bb5d48c96b36292e6e77b210717e8987ce2af8\": container with ID starting with c9de21b388770bb7ae32b509c1bb5d48c96b36292e6e77b210717e8987ce2af8 not found: ID does not exist" containerID="c9de21b388770bb7ae32b509c1bb5d48c96b36292e6e77b210717e8987ce2af8" Jan 23 08:48:19 crc kubenswrapper[4711]: I0123 08:48:19.674756 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9de21b388770bb7ae32b509c1bb5d48c96b36292e6e77b210717e8987ce2af8"} err="failed to get container status \"c9de21b388770bb7ae32b509c1bb5d48c96b36292e6e77b210717e8987ce2af8\": rpc error: code = NotFound desc = could not find container \"c9de21b388770bb7ae32b509c1bb5d48c96b36292e6e77b210717e8987ce2af8\": container with ID starting with c9de21b388770bb7ae32b509c1bb5d48c96b36292e6e77b210717e8987ce2af8 not found: ID does not exist" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.029390 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-1f13-account-create-update-6xgp4" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.189183 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e24aad2a-3579-4da8-b755-6e6edee47096-operator-scripts\") pod \"e24aad2a-3579-4da8-b755-6e6edee47096\" (UID: \"e24aad2a-3579-4da8-b755-6e6edee47096\") " Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.189572 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwsp2\" (UniqueName: \"kubernetes.io/projected/e24aad2a-3579-4da8-b755-6e6edee47096-kube-api-access-fwsp2\") pod \"e24aad2a-3579-4da8-b755-6e6edee47096\" (UID: \"e24aad2a-3579-4da8-b755-6e6edee47096\") " Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.190031 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e24aad2a-3579-4da8-b755-6e6edee47096-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e24aad2a-3579-4da8-b755-6e6edee47096" (UID: "e24aad2a-3579-4da8-b755-6e6edee47096"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.190907 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e24aad2a-3579-4da8-b755-6e6edee47096-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.193329 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e24aad2a-3579-4da8-b755-6e6edee47096-kube-api-access-fwsp2" (OuterVolumeSpecName: "kube-api-access-fwsp2") pod "e24aad2a-3579-4da8-b755-6e6edee47096" (UID: "e24aad2a-3579-4da8-b755-6e6edee47096"). InnerVolumeSpecName "kube-api-access-fwsp2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.257459 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-db-create-rm9fn" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.262388 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-api-c035-account-create-update-lb8vg" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.273717 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-api-db-create-j56tp" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.292776 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwsp2\" (UniqueName: \"kubernetes.io/projected/e24aad2a-3579-4da8-b755-6e6edee47096-kube-api-access-fwsp2\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.293469 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-cell1-db-create-vhvsz" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.394227 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00a709f3-ebe2-4c67-bf82-13e14b658eb3-operator-scripts\") pod \"00a709f3-ebe2-4c67-bf82-13e14b658eb3\" (UID: \"00a709f3-ebe2-4c67-bf82-13e14b658eb3\") " Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.394347 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k74n9\" (UniqueName: \"kubernetes.io/projected/00a709f3-ebe2-4c67-bf82-13e14b658eb3-kube-api-access-k74n9\") pod \"00a709f3-ebe2-4c67-bf82-13e14b658eb3\" (UID: \"00a709f3-ebe2-4c67-bf82-13e14b658eb3\") " Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.394383 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sr6pw\" (UniqueName: \"kubernetes.io/projected/23189694-c571-4400-b89b-084ce0ebc613-kube-api-access-sr6pw\") pod \"23189694-c571-4400-b89b-084ce0ebc613\" (UID: \"23189694-c571-4400-b89b-084ce0ebc613\") " Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.394427 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jclbw\" (UniqueName: \"kubernetes.io/projected/a9fa2633-02dc-4fff-9308-d70167fd430e-kube-api-access-jclbw\") pod \"a9fa2633-02dc-4fff-9308-d70167fd430e\" (UID: \"a9fa2633-02dc-4fff-9308-d70167fd430e\") " Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.394465 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9fa2633-02dc-4fff-9308-d70167fd430e-operator-scripts\") pod \"a9fa2633-02dc-4fff-9308-d70167fd430e\" (UID: \"a9fa2633-02dc-4fff-9308-d70167fd430e\") " Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.394539 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mjgrt\" (UniqueName: \"kubernetes.io/projected/f09a9eb4-8481-496b-96d3-dbd5ac4e2172-kube-api-access-mjgrt\") pod \"f09a9eb4-8481-496b-96d3-dbd5ac4e2172\" (UID: \"f09a9eb4-8481-496b-96d3-dbd5ac4e2172\") " Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.394565 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23189694-c571-4400-b89b-084ce0ebc613-operator-scripts\") pod \"23189694-c571-4400-b89b-084ce0ebc613\" (UID: \"23189694-c571-4400-b89b-084ce0ebc613\") " Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.394601 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f09a9eb4-8481-496b-96d3-dbd5ac4e2172-operator-scripts\") pod \"f09a9eb4-8481-496b-96d3-dbd5ac4e2172\" (UID: \"f09a9eb4-8481-496b-96d3-dbd5ac4e2172\") " Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.395331 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23189694-c571-4400-b89b-084ce0ebc613-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "23189694-c571-4400-b89b-084ce0ebc613" (UID: "23189694-c571-4400-b89b-084ce0ebc613"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.395635 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23189694-c571-4400-b89b-084ce0ebc613-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.395663 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9fa2633-02dc-4fff-9308-d70167fd430e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a9fa2633-02dc-4fff-9308-d70167fd430e" (UID: "a9fa2633-02dc-4fff-9308-d70167fd430e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.396008 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f09a9eb4-8481-496b-96d3-dbd5ac4e2172-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f09a9eb4-8481-496b-96d3-dbd5ac4e2172" (UID: "f09a9eb4-8481-496b-96d3-dbd5ac4e2172"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.396158 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00a709f3-ebe2-4c67-bf82-13e14b658eb3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "00a709f3-ebe2-4c67-bf82-13e14b658eb3" (UID: "00a709f3-ebe2-4c67-bf82-13e14b658eb3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.397253 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9fa2633-02dc-4fff-9308-d70167fd430e-kube-api-access-jclbw" (OuterVolumeSpecName: "kube-api-access-jclbw") pod "a9fa2633-02dc-4fff-9308-d70167fd430e" (UID: "a9fa2633-02dc-4fff-9308-d70167fd430e"). InnerVolumeSpecName "kube-api-access-jclbw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.397284 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00a709f3-ebe2-4c67-bf82-13e14b658eb3-kube-api-access-k74n9" (OuterVolumeSpecName: "kube-api-access-k74n9") pod "00a709f3-ebe2-4c67-bf82-13e14b658eb3" (UID: "00a709f3-ebe2-4c67-bf82-13e14b658eb3"). InnerVolumeSpecName "kube-api-access-k74n9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.398666 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f09a9eb4-8481-496b-96d3-dbd5ac4e2172-kube-api-access-mjgrt" (OuterVolumeSpecName: "kube-api-access-mjgrt") pod "f09a9eb4-8481-496b-96d3-dbd5ac4e2172" (UID: "f09a9eb4-8481-496b-96d3-dbd5ac4e2172"). InnerVolumeSpecName "kube-api-access-mjgrt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.399229 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23189694-c571-4400-b89b-084ce0ebc613-kube-api-access-sr6pw" (OuterVolumeSpecName: "kube-api-access-sr6pw") pod "23189694-c571-4400-b89b-084ce0ebc613" (UID: "23189694-c571-4400-b89b-084ce0ebc613"). InnerVolumeSpecName "kube-api-access-sr6pw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.497161 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f09a9eb4-8481-496b-96d3-dbd5ac4e2172-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.497192 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00a709f3-ebe2-4c67-bf82-13e14b658eb3-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.497202 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k74n9\" (UniqueName: \"kubernetes.io/projected/00a709f3-ebe2-4c67-bf82-13e14b658eb3-kube-api-access-k74n9\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.497212 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sr6pw\" (UniqueName: \"kubernetes.io/projected/23189694-c571-4400-b89b-084ce0ebc613-kube-api-access-sr6pw\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.497221 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jclbw\" (UniqueName: \"kubernetes.io/projected/a9fa2633-02dc-4fff-9308-d70167fd430e-kube-api-access-jclbw\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.497230 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9fa2633-02dc-4fff-9308-d70167fd430e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.497238 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mjgrt\" (UniqueName: \"kubernetes.io/projected/f09a9eb4-8481-496b-96d3-dbd5ac4e2172-kube-api-access-mjgrt\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.608935 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-db-create-vhvsz" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.608930 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-db-create-vhvsz" event={"ID":"f09a9eb4-8481-496b-96d3-dbd5ac4e2172","Type":"ContainerDied","Data":"967ac474d66c3b3b455dc2fb84ae05b95d6a2361cb08d94611f51b3efc4bc2c7"} Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.609021 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="967ac474d66c3b3b455dc2fb84ae05b95d6a2361cb08d94611f51b3efc4bc2c7" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.611944 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-api-db-create-j56tp" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.611920 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-db-create-j56tp" event={"ID":"00a709f3-ebe2-4c67-bf82-13e14b658eb3","Type":"ContainerDied","Data":"6409cfb432ef650a9370d5f7ebb0d594f9ca9218d644ecb1efb3ad174a12747d"} Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.612369 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6409cfb432ef650a9370d5f7ebb0d594f9ca9218d644ecb1efb3ad174a12747d" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.614660 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-1f13-account-create-update-6xgp4" event={"ID":"e24aad2a-3579-4da8-b755-6e6edee47096","Type":"ContainerDied","Data":"7a5e0ef2f23c261bc5370ea0aa69b61b06473c75f3e1aa4d3655a9029be98898"} Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.614673 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-1f13-account-create-update-6xgp4" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.614686 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a5e0ef2f23c261bc5370ea0aa69b61b06473c75f3e1aa4d3655a9029be98898" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.616120 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-db-create-rm9fn" event={"ID":"a9fa2633-02dc-4fff-9308-d70167fd430e","Type":"ContainerDied","Data":"f69bee8dfa2fe2fe3fceff6ac7075e5a48c2ffa9a58fd45ab54b5a2268d1694b"} Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.616150 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f69bee8dfa2fe2fe3fceff6ac7075e5a48c2ffa9a58fd45ab54b5a2268d1694b" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.616129 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-db-create-rm9fn" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.618151 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-c035-account-create-update-lb8vg" event={"ID":"23189694-c571-4400-b89b-084ce0ebc613","Type":"ContainerDied","Data":"0cbe240eeeb6ba2913379f0bdaa656391d6d39aecdb1363426b61d89be6fd435"} Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.618189 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-api-c035-account-create-update-lb8vg" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.618229 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0cbe240eeeb6ba2913379f0bdaa656391d6d39aecdb1363426b61d89be6fd435" Jan 23 08:48:20 crc kubenswrapper[4711]: I0123 08:48:20.887933 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-cell1-ae24-account-create-update-2sg59" Jan 23 08:48:21 crc kubenswrapper[4711]: I0123 08:48:21.012810 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zssjv\" (UniqueName: \"kubernetes.io/projected/900647f1-76d3-4aef-be40-87ed4482b0c6-kube-api-access-zssjv\") pod \"900647f1-76d3-4aef-be40-87ed4482b0c6\" (UID: \"900647f1-76d3-4aef-be40-87ed4482b0c6\") " Jan 23 08:48:21 crc kubenswrapper[4711]: I0123 08:48:21.012928 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/900647f1-76d3-4aef-be40-87ed4482b0c6-operator-scripts\") pod \"900647f1-76d3-4aef-be40-87ed4482b0c6\" (UID: \"900647f1-76d3-4aef-be40-87ed4482b0c6\") " Jan 23 08:48:21 crc kubenswrapper[4711]: I0123 08:48:21.013695 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/900647f1-76d3-4aef-be40-87ed4482b0c6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "900647f1-76d3-4aef-be40-87ed4482b0c6" (UID: "900647f1-76d3-4aef-be40-87ed4482b0c6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:48:21 crc kubenswrapper[4711]: I0123 08:48:21.017127 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/900647f1-76d3-4aef-be40-87ed4482b0c6-kube-api-access-zssjv" (OuterVolumeSpecName: "kube-api-access-zssjv") pod "900647f1-76d3-4aef-be40-87ed4482b0c6" (UID: "900647f1-76d3-4aef-be40-87ed4482b0c6"). InnerVolumeSpecName "kube-api-access-zssjv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:48:21 crc kubenswrapper[4711]: I0123 08:48:21.115244 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/900647f1-76d3-4aef-be40-87ed4482b0c6-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:21 crc kubenswrapper[4711]: I0123 08:48:21.115288 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zssjv\" (UniqueName: \"kubernetes.io/projected/900647f1-76d3-4aef-be40-87ed4482b0c6-kube-api-access-zssjv\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:21 crc kubenswrapper[4711]: I0123 08:48:21.482499 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30a3b0ef-6b8e-4f03-9f82-3e139cdc315a" path="/var/lib/kubelet/pods/30a3b0ef-6b8e-4f03-9f82-3e139cdc315a/volumes" Jan 23 08:48:21 crc kubenswrapper[4711]: I0123 08:48:21.626806 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-ae24-account-create-update-2sg59" event={"ID":"900647f1-76d3-4aef-be40-87ed4482b0c6","Type":"ContainerDied","Data":"bcb5b72fc3d06a3b686123941e84b4c9355c3149d59d65ab0907122e897d7bcf"} Jan 23 08:48:21 crc kubenswrapper[4711]: I0123 08:48:21.626855 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bcb5b72fc3d06a3b686123941e84b4c9355c3149d59d65ab0907122e897d7bcf" Jan 23 08:48:21 crc kubenswrapper[4711]: I0123 08:48:21.626919 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-cell1-ae24-account-create-update-2sg59" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.326711 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q"] Jan 23 08:48:22 crc kubenswrapper[4711]: E0123 08:48:22.327013 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f09a9eb4-8481-496b-96d3-dbd5ac4e2172" containerName="mariadb-database-create" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.327024 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f09a9eb4-8481-496b-96d3-dbd5ac4e2172" containerName="mariadb-database-create" Jan 23 08:48:22 crc kubenswrapper[4711]: E0123 08:48:22.327033 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9fa2633-02dc-4fff-9308-d70167fd430e" containerName="mariadb-database-create" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.327039 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9fa2633-02dc-4fff-9308-d70167fd430e" containerName="mariadb-database-create" Jan 23 08:48:22 crc kubenswrapper[4711]: E0123 08:48:22.327052 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00a709f3-ebe2-4c67-bf82-13e14b658eb3" containerName="mariadb-database-create" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.327058 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a709f3-ebe2-4c67-bf82-13e14b658eb3" containerName="mariadb-database-create" Jan 23 08:48:22 crc kubenswrapper[4711]: E0123 08:48:22.327069 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23189694-c571-4400-b89b-084ce0ebc613" containerName="mariadb-account-create-update" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.327074 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="23189694-c571-4400-b89b-084ce0ebc613" containerName="mariadb-account-create-update" Jan 23 08:48:22 crc kubenswrapper[4711]: E0123 08:48:22.327084 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="900647f1-76d3-4aef-be40-87ed4482b0c6" containerName="mariadb-account-create-update" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.327090 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="900647f1-76d3-4aef-be40-87ed4482b0c6" containerName="mariadb-account-create-update" Jan 23 08:48:22 crc kubenswrapper[4711]: E0123 08:48:22.327101 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e24aad2a-3579-4da8-b755-6e6edee47096" containerName="mariadb-account-create-update" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.327107 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e24aad2a-3579-4da8-b755-6e6edee47096" containerName="mariadb-account-create-update" Jan 23 08:48:22 crc kubenswrapper[4711]: E0123 08:48:22.327124 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30a3b0ef-6b8e-4f03-9f82-3e139cdc315a" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.327130 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="30a3b0ef-6b8e-4f03-9f82-3e139cdc315a" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.327277 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="23189694-c571-4400-b89b-084ce0ebc613" containerName="mariadb-account-create-update" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.327291 4711 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="a9fa2633-02dc-4fff-9308-d70167fd430e" containerName="mariadb-database-create" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.327300 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="30a3b0ef-6b8e-4f03-9f82-3e139cdc315a" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.327306 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="900647f1-76d3-4aef-be40-87ed4482b0c6" containerName="mariadb-account-create-update" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.327319 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e24aad2a-3579-4da8-b755-6e6edee47096" containerName="mariadb-account-create-update" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.327326 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f09a9eb4-8481-496b-96d3-dbd5ac4e2172" containerName="mariadb-database-create" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.327335 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="00a709f3-ebe2-4c67-bf82-13e14b658eb3" containerName="mariadb-database-create" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.327841 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.329611 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-nova-kuttl-dockercfg-8wdzg" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.329990 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-conductor-scripts" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.330786 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-conductor-config-data" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.380721 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q"] Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.432747 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83abb416-d991-4a35-a771-a54da2c53e0c-scripts\") pod \"nova-kuttl-cell0-conductor-db-sync-7pd7q\" (UID: \"83abb416-d991-4a35-a771-a54da2c53e0c\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.432800 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83abb416-d991-4a35-a771-a54da2c53e0c-config-data\") pod \"nova-kuttl-cell0-conductor-db-sync-7pd7q\" (UID: \"83abb416-d991-4a35-a771-a54da2c53e0c\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.432830 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8v4wc\" (UniqueName: \"kubernetes.io/projected/83abb416-d991-4a35-a771-a54da2c53e0c-kube-api-access-8v4wc\") pod \"nova-kuttl-cell0-conductor-db-sync-7pd7q\" (UID: \"83abb416-d991-4a35-a771-a54da2c53e0c\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.534228 4711 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83abb416-d991-4a35-a771-a54da2c53e0c-config-data\") pod \"nova-kuttl-cell0-conductor-db-sync-7pd7q\" (UID: \"83abb416-d991-4a35-a771-a54da2c53e0c\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.534324 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8v4wc\" (UniqueName: \"kubernetes.io/projected/83abb416-d991-4a35-a771-a54da2c53e0c-kube-api-access-8v4wc\") pod \"nova-kuttl-cell0-conductor-db-sync-7pd7q\" (UID: \"83abb416-d991-4a35-a771-a54da2c53e0c\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.534546 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83abb416-d991-4a35-a771-a54da2c53e0c-scripts\") pod \"nova-kuttl-cell0-conductor-db-sync-7pd7q\" (UID: \"83abb416-d991-4a35-a771-a54da2c53e0c\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.539109 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83abb416-d991-4a35-a771-a54da2c53e0c-config-data\") pod \"nova-kuttl-cell0-conductor-db-sync-7pd7q\" (UID: \"83abb416-d991-4a35-a771-a54da2c53e0c\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.540028 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83abb416-d991-4a35-a771-a54da2c53e0c-scripts\") pod \"nova-kuttl-cell0-conductor-db-sync-7pd7q\" (UID: \"83abb416-d991-4a35-a771-a54da2c53e0c\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.551235 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8v4wc\" (UniqueName: \"kubernetes.io/projected/83abb416-d991-4a35-a771-a54da2c53e0c-kube-api-access-8v4wc\") pod \"nova-kuttl-cell0-conductor-db-sync-7pd7q\" (UID: \"83abb416-d991-4a35-a771-a54da2c53e0c\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q" Jan 23 08:48:22 crc kubenswrapper[4711]: I0123 08:48:22.644233 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q" Jan 23 08:48:23 crc kubenswrapper[4711]: I0123 08:48:23.049411 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q"] Jan 23 08:48:23 crc kubenswrapper[4711]: W0123 08:48:23.056694 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod83abb416_d991_4a35_a771_a54da2c53e0c.slice/crio-5d90f16035f0cef6046a761544d9f2cff5d3d147d19d6882c6ba511a624f2b0e WatchSource:0}: Error finding container 5d90f16035f0cef6046a761544d9f2cff5d3d147d19d6882c6ba511a624f2b0e: Status 404 returned error can't find the container with id 5d90f16035f0cef6046a761544d9f2cff5d3d147d19d6882c6ba511a624f2b0e Jan 23 08:48:23 crc kubenswrapper[4711]: I0123 08:48:23.644657 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q" event={"ID":"83abb416-d991-4a35-a771-a54da2c53e0c","Type":"ContainerStarted","Data":"c8d3881e274a6b070aff46f00b3cd110c1acaee33c0d643828bcc7126b8f51eb"} Jan 23 08:48:23 crc kubenswrapper[4711]: I0123 08:48:23.644970 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q" event={"ID":"83abb416-d991-4a35-a771-a54da2c53e0c","Type":"ContainerStarted","Data":"5d90f16035f0cef6046a761544d9f2cff5d3d147d19d6882c6ba511a624f2b0e"} Jan 23 08:48:23 crc kubenswrapper[4711]: I0123 08:48:23.660651 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q" podStartSLOduration=1.660635753 podStartE2EDuration="1.660635753s" podCreationTimestamp="2026-01-23 08:48:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:48:23.658988953 +0000 UTC m=+1689.231945311" watchObservedRunningTime="2026-01-23 08:48:23.660635753 +0000 UTC m=+1689.233592121" Jan 23 08:48:25 crc kubenswrapper[4711]: I0123 08:48:25.993891 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:48:25 crc kubenswrapper[4711]: I0123 08:48:25.994559 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:48:25 crc kubenswrapper[4711]: I0123 08:48:25.994642 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:48:25 crc kubenswrapper[4711]: I0123 08:48:25.995830 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058"} pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 23 08:48:25 crc kubenswrapper[4711]: I0123 08:48:25.995928 4711 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" containerID="cri-o://3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" gracePeriod=600 Jan 23 08:48:26 crc kubenswrapper[4711]: E0123 08:48:26.122858 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:48:26 crc kubenswrapper[4711]: I0123 08:48:26.679690 4711 generic.go:334] "Generic (PLEG): container finished" podID="3846d4e0-cfda-4e0b-8747-85267de12736" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" exitCode=0 Jan 23 08:48:26 crc kubenswrapper[4711]: I0123 08:48:26.679772 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerDied","Data":"3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058"} Jan 23 08:48:26 crc kubenswrapper[4711]: I0123 08:48:26.680095 4711 scope.go:117] "RemoveContainer" containerID="509b113ae3fd960091847020bbc2a0f41a3fb8b6e06cdd9e7afa31b3382efa17" Jan 23 08:48:26 crc kubenswrapper[4711]: I0123 08:48:26.680675 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:48:26 crc kubenswrapper[4711]: E0123 08:48:26.680954 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:48:28 crc kubenswrapper[4711]: I0123 08:48:28.699142 4711 generic.go:334] "Generic (PLEG): container finished" podID="83abb416-d991-4a35-a771-a54da2c53e0c" containerID="c8d3881e274a6b070aff46f00b3cd110c1acaee33c0d643828bcc7126b8f51eb" exitCode=0 Jan 23 08:48:28 crc kubenswrapper[4711]: I0123 08:48:28.699193 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q" event={"ID":"83abb416-d991-4a35-a771-a54da2c53e0c","Type":"ContainerDied","Data":"c8d3881e274a6b070aff46f00b3cd110c1acaee33c0d643828bcc7126b8f51eb"} Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.086770 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q" Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.256289 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8v4wc\" (UniqueName: \"kubernetes.io/projected/83abb416-d991-4a35-a771-a54da2c53e0c-kube-api-access-8v4wc\") pod \"83abb416-d991-4a35-a771-a54da2c53e0c\" (UID: \"83abb416-d991-4a35-a771-a54da2c53e0c\") " Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.256603 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83abb416-d991-4a35-a771-a54da2c53e0c-scripts\") pod \"83abb416-d991-4a35-a771-a54da2c53e0c\" (UID: \"83abb416-d991-4a35-a771-a54da2c53e0c\") " Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.256695 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83abb416-d991-4a35-a771-a54da2c53e0c-config-data\") pod \"83abb416-d991-4a35-a771-a54da2c53e0c\" (UID: \"83abb416-d991-4a35-a771-a54da2c53e0c\") " Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.262226 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83abb416-d991-4a35-a771-a54da2c53e0c-scripts" (OuterVolumeSpecName: "scripts") pod "83abb416-d991-4a35-a771-a54da2c53e0c" (UID: "83abb416-d991-4a35-a771-a54da2c53e0c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.262599 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83abb416-d991-4a35-a771-a54da2c53e0c-kube-api-access-8v4wc" (OuterVolumeSpecName: "kube-api-access-8v4wc") pod "83abb416-d991-4a35-a771-a54da2c53e0c" (UID: "83abb416-d991-4a35-a771-a54da2c53e0c"). InnerVolumeSpecName "kube-api-access-8v4wc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.285294 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83abb416-d991-4a35-a771-a54da2c53e0c-config-data" (OuterVolumeSpecName: "config-data") pod "83abb416-d991-4a35-a771-a54da2c53e0c" (UID: "83abb416-d991-4a35-a771-a54da2c53e0c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.358716 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8v4wc\" (UniqueName: \"kubernetes.io/projected/83abb416-d991-4a35-a771-a54da2c53e0c-kube-api-access-8v4wc\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.358764 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83abb416-d991-4a35-a771-a54da2c53e0c-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.358774 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83abb416-d991-4a35-a771-a54da2c53e0c-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.716659 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q" event={"ID":"83abb416-d991-4a35-a771-a54da2c53e0c","Type":"ContainerDied","Data":"5d90f16035f0cef6046a761544d9f2cff5d3d147d19d6882c6ba511a624f2b0e"} Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.716704 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5d90f16035f0cef6046a761544d9f2cff5d3d147d19d6882c6ba511a624f2b0e" Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.716731 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q" Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.793614 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:48:30 crc kubenswrapper[4711]: E0123 08:48:30.794008 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83abb416-d991-4a35-a771-a54da2c53e0c" containerName="nova-kuttl-cell0-conductor-db-sync" Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.794031 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="83abb416-d991-4a35-a771-a54da2c53e0c" containerName="nova-kuttl-cell0-conductor-db-sync" Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.794217 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="83abb416-d991-4a35-a771-a54da2c53e0c" containerName="nova-kuttl-cell0-conductor-db-sync" Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.794870 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.798385 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-conductor-config-data" Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.798465 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-nova-kuttl-dockercfg-8wdzg" Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.809648 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.867487 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgk9n\" (UniqueName: \"kubernetes.io/projected/fc11dc78-b7be-4509-818f-0d4cb6c97931-kube-api-access-kgk9n\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"fc11dc78-b7be-4509-818f-0d4cb6c97931\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.867570 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc11dc78-b7be-4509-818f-0d4cb6c97931-config-data\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"fc11dc78-b7be-4509-818f-0d4cb6c97931\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.968474 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgk9n\" (UniqueName: \"kubernetes.io/projected/fc11dc78-b7be-4509-818f-0d4cb6c97931-kube-api-access-kgk9n\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"fc11dc78-b7be-4509-818f-0d4cb6c97931\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.968548 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc11dc78-b7be-4509-818f-0d4cb6c97931-config-data\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"fc11dc78-b7be-4509-818f-0d4cb6c97931\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.973172 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc11dc78-b7be-4509-818f-0d4cb6c97931-config-data\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"fc11dc78-b7be-4509-818f-0d4cb6c97931\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:48:30 crc kubenswrapper[4711]: I0123 08:48:30.990578 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgk9n\" (UniqueName: \"kubernetes.io/projected/fc11dc78-b7be-4509-818f-0d4cb6c97931-kube-api-access-kgk9n\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"fc11dc78-b7be-4509-818f-0d4cb6c97931\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:48:31 crc kubenswrapper[4711]: I0123 08:48:31.113213 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:48:31 crc kubenswrapper[4711]: I0123 08:48:31.537377 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:48:31 crc kubenswrapper[4711]: I0123 08:48:31.725638 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" event={"ID":"fc11dc78-b7be-4509-818f-0d4cb6c97931","Type":"ContainerStarted","Data":"d5685990e14dd02746739b6c05bb8ba6bacfb4bccd1022f77cb85f74c0e281b3"} Jan 23 08:48:31 crc kubenswrapper[4711]: I0123 08:48:31.726026 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" event={"ID":"fc11dc78-b7be-4509-818f-0d4cb6c97931","Type":"ContainerStarted","Data":"93baa213b8cc19096e00b6bcb0d159dcb4fcf8c8f7453e7a9e138dfb5664842c"} Jan 23 08:48:31 crc kubenswrapper[4711]: I0123 08:48:31.726163 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:48:31 crc kubenswrapper[4711]: I0123 08:48:31.739719 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" podStartSLOduration=1.739696 podStartE2EDuration="1.739696s" podCreationTimestamp="2026-01-23 08:48:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:48:31.737821183 +0000 UTC m=+1697.310777551" watchObservedRunningTime="2026-01-23 08:48:31.739696 +0000 UTC m=+1697.312652368" Jan 23 08:48:36 crc kubenswrapper[4711]: I0123 08:48:36.143972 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:48:36 crc kubenswrapper[4711]: I0123 08:48:36.821019 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs"] Jan 23 08:48:36 crc kubenswrapper[4711]: I0123 08:48:36.822146 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs" Jan 23 08:48:36 crc kubenswrapper[4711]: I0123 08:48:36.824594 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-manage-scripts" Jan 23 08:48:36 crc kubenswrapper[4711]: I0123 08:48:36.824711 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-manage-config-data" Jan 23 08:48:36 crc kubenswrapper[4711]: I0123 08:48:36.841979 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs"] Jan 23 08:48:36 crc kubenswrapper[4711]: I0123 08:48:36.957297 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b4b00a4-3b27-43b4-9d01-bc25c89a1176-config-data\") pod \"nova-kuttl-cell0-cell-mapping-4z6gs\" (UID: \"5b4b00a4-3b27-43b4-9d01-bc25c89a1176\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs" Jan 23 08:48:36 crc kubenswrapper[4711]: I0123 08:48:36.957385 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbpcf\" (UniqueName: \"kubernetes.io/projected/5b4b00a4-3b27-43b4-9d01-bc25c89a1176-kube-api-access-lbpcf\") pod \"nova-kuttl-cell0-cell-mapping-4z6gs\" (UID: \"5b4b00a4-3b27-43b4-9d01-bc25c89a1176\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs" Jan 23 08:48:36 crc kubenswrapper[4711]: I0123 08:48:36.957437 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b4b00a4-3b27-43b4-9d01-bc25c89a1176-scripts\") pod \"nova-kuttl-cell0-cell-mapping-4z6gs\" (UID: \"5b4b00a4-3b27-43b4-9d01-bc25c89a1176\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.000653 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.002235 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.005336 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-api-config-data" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.031362 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.045558 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"] Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.046980 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.049679 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-novncproxy-config-data" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.058948 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b4b00a4-3b27-43b4-9d01-bc25c89a1176-config-data\") pod \"nova-kuttl-cell0-cell-mapping-4z6gs\" (UID: \"5b4b00a4-3b27-43b4-9d01-bc25c89a1176\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.059017 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbpcf\" (UniqueName: \"kubernetes.io/projected/5b4b00a4-3b27-43b4-9d01-bc25c89a1176-kube-api-access-lbpcf\") pod \"nova-kuttl-cell0-cell-mapping-4z6gs\" (UID: \"5b4b00a4-3b27-43b4-9d01-bc25c89a1176\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.059057 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b4b00a4-3b27-43b4-9d01-bc25c89a1176-scripts\") pod \"nova-kuttl-cell0-cell-mapping-4z6gs\" (UID: \"5b4b00a4-3b27-43b4-9d01-bc25c89a1176\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.069763 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"] Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.076797 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b4b00a4-3b27-43b4-9d01-bc25c89a1176-scripts\") pod \"nova-kuttl-cell0-cell-mapping-4z6gs\" (UID: \"5b4b00a4-3b27-43b4-9d01-bc25c89a1176\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.087739 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b4b00a4-3b27-43b4-9d01-bc25c89a1176-config-data\") pod \"nova-kuttl-cell0-cell-mapping-4z6gs\" (UID: \"5b4b00a4-3b27-43b4-9d01-bc25c89a1176\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.097333 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbpcf\" (UniqueName: \"kubernetes.io/projected/5b4b00a4-3b27-43b4-9d01-bc25c89a1176-kube-api-access-lbpcf\") pod \"nova-kuttl-cell0-cell-mapping-4z6gs\" (UID: \"5b4b00a4-3b27-43b4-9d01-bc25c89a1176\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.145668 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.162314 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.165547 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8b8g\" (UniqueName: \"kubernetes.io/projected/c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c-kube-api-access-z8b8g\") pod \"nova-kuttl-api-0\" (UID: \"c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.166074 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.166617 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-metadata-config-data" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.167589 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c-config-data\") pod \"nova-kuttl-api-0\" (UID: \"c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.167771 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlzgl\" (UniqueName: \"kubernetes.io/projected/25110455-b1b8-4c71-ba91-667073ffc1fa-kube-api-access-nlzgl\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"25110455-b1b8-4c71-ba91-667073ffc1fa\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.167996 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c-logs\") pod \"nova-kuttl-api-0\" (UID: \"c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.168094 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25110455-b1b8-4c71-ba91-667073ffc1fa-config-data\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"25110455-b1b8-4c71-ba91-667073ffc1fa\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.168658 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.240831 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.243132 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.250910 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.253732 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-scheduler-config-data" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.273474 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c-config-data\") pod \"nova-kuttl-api-0\" (UID: \"c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.274200 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlzgl\" (UniqueName: \"kubernetes.io/projected/25110455-b1b8-4c71-ba91-667073ffc1fa-kube-api-access-nlzgl\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"25110455-b1b8-4c71-ba91-667073ffc1fa\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.274424 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fk27j\" (UniqueName: \"kubernetes.io/projected/5f83bb15-6548-4253-bdd5-9cb9f8f936b1-kube-api-access-fk27j\") pod \"nova-kuttl-metadata-0\" (UID: \"5f83bb15-6548-4253-bdd5-9cb9f8f936b1\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.274539 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c-logs\") pod \"nova-kuttl-api-0\" (UID: \"c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.274663 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25110455-b1b8-4c71-ba91-667073ffc1fa-config-data\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"25110455-b1b8-4c71-ba91-667073ffc1fa\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.274801 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f83bb15-6548-4253-bdd5-9cb9f8f936b1-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"5f83bb15-6548-4253-bdd5-9cb9f8f936b1\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.274882 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ae11979-6934-4330-9924-cc0ac6ac8196-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"4ae11979-6934-4330-9924-cc0ac6ac8196\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.274974 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7bt5\" (UniqueName: \"kubernetes.io/projected/4ae11979-6934-4330-9924-cc0ac6ac8196-kube-api-access-p7bt5\") pod \"nova-kuttl-scheduler-0\" (UID: \"4ae11979-6934-4330-9924-cc0ac6ac8196\") " 
pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.275057 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f83bb15-6548-4253-bdd5-9cb9f8f936b1-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"5f83bb15-6548-4253-bdd5-9cb9f8f936b1\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.275196 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8b8g\" (UniqueName: \"kubernetes.io/projected/c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c-kube-api-access-z8b8g\") pod \"nova-kuttl-api-0\" (UID: \"c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.274934 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c-logs\") pod \"nova-kuttl-api-0\" (UID: \"c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.283554 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c-config-data\") pod \"nova-kuttl-api-0\" (UID: \"c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.288336 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25110455-b1b8-4c71-ba91-667073ffc1fa-config-data\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"25110455-b1b8-4c71-ba91-667073ffc1fa\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.299177 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlzgl\" (UniqueName: \"kubernetes.io/projected/25110455-b1b8-4c71-ba91-667073ffc1fa-kube-api-access-nlzgl\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"25110455-b1b8-4c71-ba91-667073ffc1fa\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.312610 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8b8g\" (UniqueName: \"kubernetes.io/projected/c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c-kube-api-access-z8b8g\") pod \"nova-kuttl-api-0\" (UID: \"c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.325023 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.376439 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fk27j\" (UniqueName: \"kubernetes.io/projected/5f83bb15-6548-4253-bdd5-9cb9f8f936b1-kube-api-access-fk27j\") pod \"nova-kuttl-metadata-0\" (UID: \"5f83bb15-6548-4253-bdd5-9cb9f8f936b1\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.376567 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f83bb15-6548-4253-bdd5-9cb9f8f936b1-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"5f83bb15-6548-4253-bdd5-9cb9f8f936b1\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.376598 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ae11979-6934-4330-9924-cc0ac6ac8196-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"4ae11979-6934-4330-9924-cc0ac6ac8196\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.376625 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7bt5\" (UniqueName: \"kubernetes.io/projected/4ae11979-6934-4330-9924-cc0ac6ac8196-kube-api-access-p7bt5\") pod \"nova-kuttl-scheduler-0\" (UID: \"4ae11979-6934-4330-9924-cc0ac6ac8196\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.376646 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f83bb15-6548-4253-bdd5-9cb9f8f936b1-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"5f83bb15-6548-4253-bdd5-9cb9f8f936b1\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.377257 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.378867 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f83bb15-6548-4253-bdd5-9cb9f8f936b1-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"5f83bb15-6548-4253-bdd5-9cb9f8f936b1\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.381452 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f83bb15-6548-4253-bdd5-9cb9f8f936b1-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"5f83bb15-6548-4253-bdd5-9cb9f8f936b1\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.384025 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ae11979-6934-4330-9924-cc0ac6ac8196-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"4ae11979-6934-4330-9924-cc0ac6ac8196\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.395938 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fk27j\" (UniqueName: \"kubernetes.io/projected/5f83bb15-6548-4253-bdd5-9cb9f8f936b1-kube-api-access-fk27j\") pod \"nova-kuttl-metadata-0\" (UID: \"5f83bb15-6548-4253-bdd5-9cb9f8f936b1\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.422407 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7bt5\" (UniqueName: \"kubernetes.io/projected/4ae11979-6934-4330-9924-cc0ac6ac8196-kube-api-access-p7bt5\") pod \"nova-kuttl-scheduler-0\" (UID: \"4ae11979-6934-4330-9924-cc0ac6ac8196\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.555617 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.620589 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.708573 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs"] Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.819639 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh"] Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.820857 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.823570 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs" event={"ID":"5b4b00a4-3b27-43b4-9d01-bc25c89a1176","Type":"ContainerStarted","Data":"0762f3f50df508c4d121556e347e6f61c5b467ffe75a14df9aea60013e9d2b80"} Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.827947 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-conductor-scripts" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.828182 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-conductor-config-data" Jan 23 08:48:37 crc kubenswrapper[4711]: W0123 08:48:37.841171 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc9c3759e_88e1_4cbc_8c5c_d6b55625dc5c.slice/crio-fb71335dd9cbe2c9d729c325e37dcdec4eddf9350f44400380d9d32106a9bad0 WatchSource:0}: Error finding container fb71335dd9cbe2c9d729c325e37dcdec4eddf9350f44400380d9d32106a9bad0: Status 404 returned error can't find the container with id fb71335dd9cbe2c9d729c325e37dcdec4eddf9350f44400380d9d32106a9bad0 Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.843170 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh"] Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.852641 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.950874 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"] Jan 23 08:48:37 crc kubenswrapper[4711]: W0123 08:48:37.956024 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod25110455_b1b8_4c71_ba91_667073ffc1fa.slice/crio-424d4b4a3531d00d848fd6ed8acf8ca6830c57a89f9a48d7c9572feb5d08b9dd WatchSource:0}: Error finding container 424d4b4a3531d00d848fd6ed8acf8ca6830c57a89f9a48d7c9572feb5d08b9dd: Status 404 returned error can't find the container with id 424d4b4a3531d00d848fd6ed8acf8ca6830c57a89f9a48d7c9572feb5d08b9dd Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.986675 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5269eca1-6667-4df6-8c89-4fafab283186-scripts\") pod \"nova-kuttl-cell1-conductor-db-sync-kmnlh\" (UID: \"5269eca1-6667-4df6-8c89-4fafab283186\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.986938 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bmvs\" (UniqueName: \"kubernetes.io/projected/5269eca1-6667-4df6-8c89-4fafab283186-kube-api-access-7bmvs\") pod \"nova-kuttl-cell1-conductor-db-sync-kmnlh\" (UID: \"5269eca1-6667-4df6-8c89-4fafab283186\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh" Jan 23 08:48:37 crc kubenswrapper[4711]: I0123 08:48:37.987123 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5269eca1-6667-4df6-8c89-4fafab283186-config-data\") pod 
\"nova-kuttl-cell1-conductor-db-sync-kmnlh\" (UID: \"5269eca1-6667-4df6-8c89-4fafab283186\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh" Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.092567 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5269eca1-6667-4df6-8c89-4fafab283186-scripts\") pod \"nova-kuttl-cell1-conductor-db-sync-kmnlh\" (UID: \"5269eca1-6667-4df6-8c89-4fafab283186\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh" Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.093596 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bmvs\" (UniqueName: \"kubernetes.io/projected/5269eca1-6667-4df6-8c89-4fafab283186-kube-api-access-7bmvs\") pod \"nova-kuttl-cell1-conductor-db-sync-kmnlh\" (UID: \"5269eca1-6667-4df6-8c89-4fafab283186\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh" Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.093691 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5269eca1-6667-4df6-8c89-4fafab283186-config-data\") pod \"nova-kuttl-cell1-conductor-db-sync-kmnlh\" (UID: \"5269eca1-6667-4df6-8c89-4fafab283186\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh" Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.098777 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5269eca1-6667-4df6-8c89-4fafab283186-scripts\") pod \"nova-kuttl-cell1-conductor-db-sync-kmnlh\" (UID: \"5269eca1-6667-4df6-8c89-4fafab283186\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh" Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.098895 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5269eca1-6667-4df6-8c89-4fafab283186-config-data\") pod \"nova-kuttl-cell1-conductor-db-sync-kmnlh\" (UID: \"5269eca1-6667-4df6-8c89-4fafab283186\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh" Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.100650 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.112000 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bmvs\" (UniqueName: \"kubernetes.io/projected/5269eca1-6667-4df6-8c89-4fafab283186-kube-api-access-7bmvs\") pod \"nova-kuttl-cell1-conductor-db-sync-kmnlh\" (UID: \"5269eca1-6667-4df6-8c89-4fafab283186\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh" Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.155015 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh" Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.243117 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:48:38 crc kubenswrapper[4711]: W0123 08:48:38.256185 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4ae11979_6934_4330_9924_cc0ac6ac8196.slice/crio-43ba48523801f1676ba00a7c333669aa448dfbfc390365a7534693f839caf69c WatchSource:0}: Error finding container 43ba48523801f1676ba00a7c333669aa448dfbfc390365a7534693f839caf69c: Status 404 returned error can't find the container with id 43ba48523801f1676ba00a7c333669aa448dfbfc390365a7534693f839caf69c Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.690320 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh"] Jan 23 08:48:38 crc kubenswrapper[4711]: W0123 08:48:38.692014 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5269eca1_6667_4df6_8c89_4fafab283186.slice/crio-33ace04fd8d8ce982976dd0bb90640336609a50d41ff2e69df186f1965c1552a WatchSource:0}: Error finding container 33ace04fd8d8ce982976dd0bb90640336609a50d41ff2e69df186f1965c1552a: Status 404 returned error can't find the container with id 33ace04fd8d8ce982976dd0bb90640336609a50d41ff2e69df186f1965c1552a Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.832132 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh" event={"ID":"5269eca1-6667-4df6-8c89-4fafab283186","Type":"ContainerStarted","Data":"33ace04fd8d8ce982976dd0bb90640336609a50d41ff2e69df186f1965c1552a"} Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.833807 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" event={"ID":"25110455-b1b8-4c71-ba91-667073ffc1fa","Type":"ContainerStarted","Data":"328adb3acdaa5d43f27cdee12f97e8a5b4e75490b7ace520bf4af4f7ff02e4c9"} Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.833847 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" event={"ID":"25110455-b1b8-4c71-ba91-667073ffc1fa","Type":"ContainerStarted","Data":"424d4b4a3531d00d848fd6ed8acf8ca6830c57a89f9a48d7c9572feb5d08b9dd"} Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.835274 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"4ae11979-6934-4330-9924-cc0ac6ac8196","Type":"ContainerStarted","Data":"ca0664314fe5db3454d0eaa2fee031b8d66702f1a5480792972bd14d0c5f4a2f"} Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.835320 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"4ae11979-6934-4330-9924-cc0ac6ac8196","Type":"ContainerStarted","Data":"43ba48523801f1676ba00a7c333669aa448dfbfc390365a7534693f839caf69c"} Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.841308 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs" event={"ID":"5b4b00a4-3b27-43b4-9d01-bc25c89a1176","Type":"ContainerStarted","Data":"992d2b13977571fdabd23e5d781f6a965f4e03753fbc32cc44cdfeea87a51504"} Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.843265 4711 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"5f83bb15-6548-4253-bdd5-9cb9f8f936b1","Type":"ContainerStarted","Data":"7899c9fc749b10240aeef4dea0ed55882382eebe770328e5a1e6affc3ef35b9d"} Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.843300 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"5f83bb15-6548-4253-bdd5-9cb9f8f936b1","Type":"ContainerStarted","Data":"4229761c1a62101600dbe3f99a3a00ea6bcfb9cfba86584b70d5c899aea848b1"} Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.843312 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"5f83bb15-6548-4253-bdd5-9cb9f8f936b1","Type":"ContainerStarted","Data":"0eb49b2a731afbeb319af5a5ee43fba4dd6d617698a961f2f265edbf2a0fe9a1"} Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.855841 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c","Type":"ContainerStarted","Data":"4d5005573bf7dca20b83c624512bcf46ad3791229cd7afd73f0323869d5cd869"} Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.855900 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c","Type":"ContainerStarted","Data":"6196e40c5db7abf2849bee7b7980920f854802b927dec1aec3f7d074ce6beecc"} Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.855912 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c","Type":"ContainerStarted","Data":"fb71335dd9cbe2c9d729c325e37dcdec4eddf9350f44400380d9d32106a9bad0"} Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.856279 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" podStartSLOduration=1.856258329 podStartE2EDuration="1.856258329s" podCreationTimestamp="2026-01-23 08:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:48:38.848239723 +0000 UTC m=+1704.421196091" watchObservedRunningTime="2026-01-23 08:48:38.856258329 +0000 UTC m=+1704.429214697" Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.869578 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podStartSLOduration=1.869558015 podStartE2EDuration="1.869558015s" podCreationTimestamp="2026-01-23 08:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:48:38.865653579 +0000 UTC m=+1704.438609957" watchObservedRunningTime="2026-01-23 08:48:38.869558015 +0000 UTC m=+1704.442514383" Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.891501 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-metadata-0" podStartSLOduration=1.891483122 podStartE2EDuration="1.891483122s" podCreationTimestamp="2026-01-23 08:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:48:38.890946009 +0000 UTC m=+1704.463902377" watchObservedRunningTime="2026-01-23 08:48:38.891483122 +0000 UTC m=+1704.464439490" Jan 23 
08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.931387 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs" podStartSLOduration=2.9313673 podStartE2EDuration="2.9313673s" podCreationTimestamp="2026-01-23 08:48:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:48:38.91258247 +0000 UTC m=+1704.485538838" watchObservedRunningTime="2026-01-23 08:48:38.9313673 +0000 UTC m=+1704.504323668" Jan 23 08:48:38 crc kubenswrapper[4711]: I0123 08:48:38.935534 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-api-0" podStartSLOduration=2.935488711 podStartE2EDuration="2.935488711s" podCreationTimestamp="2026-01-23 08:48:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:48:38.927524785 +0000 UTC m=+1704.500481153" watchObservedRunningTime="2026-01-23 08:48:38.935488711 +0000 UTC m=+1704.508445079" Jan 23 08:48:39 crc kubenswrapper[4711]: I0123 08:48:39.866289 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh" event={"ID":"5269eca1-6667-4df6-8c89-4fafab283186","Type":"ContainerStarted","Data":"0df3a2a8b47c6371146331aae739f5f00207a75c22cb222c62519a7c798331d4"} Jan 23 08:48:39 crc kubenswrapper[4711]: I0123 08:48:39.887679 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh" podStartSLOduration=2.887660713 podStartE2EDuration="2.887660713s" podCreationTimestamp="2026-01-23 08:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:48:39.880111499 +0000 UTC m=+1705.453067857" watchObservedRunningTime="2026-01-23 08:48:39.887660713 +0000 UTC m=+1705.460617071" Jan 23 08:48:41 crc kubenswrapper[4711]: I0123 08:48:41.473745 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:48:41 crc kubenswrapper[4711]: E0123 08:48:41.474043 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:48:41 crc kubenswrapper[4711]: I0123 08:48:41.881881 4711 generic.go:334] "Generic (PLEG): container finished" podID="5269eca1-6667-4df6-8c89-4fafab283186" containerID="0df3a2a8b47c6371146331aae739f5f00207a75c22cb222c62519a7c798331d4" exitCode=0 Jan 23 08:48:41 crc kubenswrapper[4711]: I0123 08:48:41.882060 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh" event={"ID":"5269eca1-6667-4df6-8c89-4fafab283186","Type":"ContainerDied","Data":"0df3a2a8b47c6371146331aae739f5f00207a75c22cb222c62519a7c798331d4"} Jan 23 08:48:42 crc kubenswrapper[4711]: I0123 08:48:42.377857 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:48:42 crc kubenswrapper[4711]: 
I0123 08:48:42.556613 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:42 crc kubenswrapper[4711]: I0123 08:48:42.556671 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:42 crc kubenswrapper[4711]: I0123 08:48:42.621165 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:43 crc kubenswrapper[4711]: I0123 08:48:43.212852 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh" Jan 23 08:48:43 crc kubenswrapper[4711]: I0123 08:48:43.406038 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5269eca1-6667-4df6-8c89-4fafab283186-config-data\") pod \"5269eca1-6667-4df6-8c89-4fafab283186\" (UID: \"5269eca1-6667-4df6-8c89-4fafab283186\") " Jan 23 08:48:43 crc kubenswrapper[4711]: I0123 08:48:43.406145 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bmvs\" (UniqueName: \"kubernetes.io/projected/5269eca1-6667-4df6-8c89-4fafab283186-kube-api-access-7bmvs\") pod \"5269eca1-6667-4df6-8c89-4fafab283186\" (UID: \"5269eca1-6667-4df6-8c89-4fafab283186\") " Jan 23 08:48:43 crc kubenswrapper[4711]: I0123 08:48:43.406285 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5269eca1-6667-4df6-8c89-4fafab283186-scripts\") pod \"5269eca1-6667-4df6-8c89-4fafab283186\" (UID: \"5269eca1-6667-4df6-8c89-4fafab283186\") " Jan 23 08:48:43 crc kubenswrapper[4711]: I0123 08:48:43.415682 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5269eca1-6667-4df6-8c89-4fafab283186-scripts" (OuterVolumeSpecName: "scripts") pod "5269eca1-6667-4df6-8c89-4fafab283186" (UID: "5269eca1-6667-4df6-8c89-4fafab283186"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:48:43 crc kubenswrapper[4711]: I0123 08:48:43.419843 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5269eca1-6667-4df6-8c89-4fafab283186-kube-api-access-7bmvs" (OuterVolumeSpecName: "kube-api-access-7bmvs") pod "5269eca1-6667-4df6-8c89-4fafab283186" (UID: "5269eca1-6667-4df6-8c89-4fafab283186"). InnerVolumeSpecName "kube-api-access-7bmvs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:48:43 crc kubenswrapper[4711]: I0123 08:48:43.429134 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5269eca1-6667-4df6-8c89-4fafab283186-config-data" (OuterVolumeSpecName: "config-data") pod "5269eca1-6667-4df6-8c89-4fafab283186" (UID: "5269eca1-6667-4df6-8c89-4fafab283186"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:48:43 crc kubenswrapper[4711]: I0123 08:48:43.507993 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bmvs\" (UniqueName: \"kubernetes.io/projected/5269eca1-6667-4df6-8c89-4fafab283186-kube-api-access-7bmvs\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:43 crc kubenswrapper[4711]: I0123 08:48:43.508477 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5269eca1-6667-4df6-8c89-4fafab283186-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:43 crc kubenswrapper[4711]: I0123 08:48:43.508502 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5269eca1-6667-4df6-8c89-4fafab283186-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:43 crc kubenswrapper[4711]: I0123 08:48:43.902181 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh" event={"ID":"5269eca1-6667-4df6-8c89-4fafab283186","Type":"ContainerDied","Data":"33ace04fd8d8ce982976dd0bb90640336609a50d41ff2e69df186f1965c1552a"} Jan 23 08:48:43 crc kubenswrapper[4711]: I0123 08:48:43.902267 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="33ace04fd8d8ce982976dd0bb90640336609a50d41ff2e69df186f1965c1552a" Jan 23 08:48:43 crc kubenswrapper[4711]: I0123 08:48:43.902229 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh" Jan 23 08:48:43 crc kubenswrapper[4711]: I0123 08:48:43.907985 4711 generic.go:334] "Generic (PLEG): container finished" podID="5b4b00a4-3b27-43b4-9d01-bc25c89a1176" containerID="992d2b13977571fdabd23e5d781f6a965f4e03753fbc32cc44cdfeea87a51504" exitCode=0 Jan 23 08:48:43 crc kubenswrapper[4711]: I0123 08:48:43.908038 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs" event={"ID":"5b4b00a4-3b27-43b4-9d01-bc25c89a1176","Type":"ContainerDied","Data":"992d2b13977571fdabd23e5d781f6a965f4e03753fbc32cc44cdfeea87a51504"} Jan 23 08:48:44 crc kubenswrapper[4711]: I0123 08:48:43.999910 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:48:44 crc kubenswrapper[4711]: E0123 08:48:44.000256 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5269eca1-6667-4df6-8c89-4fafab283186" containerName="nova-kuttl-cell1-conductor-db-sync" Jan 23 08:48:44 crc kubenswrapper[4711]: I0123 08:48:44.000286 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="5269eca1-6667-4df6-8c89-4fafab283186" containerName="nova-kuttl-cell1-conductor-db-sync" Jan 23 08:48:44 crc kubenswrapper[4711]: I0123 08:48:44.000456 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="5269eca1-6667-4df6-8c89-4fafab283186" containerName="nova-kuttl-cell1-conductor-db-sync" Jan 23 08:48:44 crc kubenswrapper[4711]: I0123 08:48:44.000969 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:48:44 crc kubenswrapper[4711]: I0123 08:48:44.009623 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:48:44 crc kubenswrapper[4711]: I0123 08:48:44.010082 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-conductor-config-data" Jan 23 08:48:44 crc kubenswrapper[4711]: I0123 08:48:44.030468 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8f9v\" (UniqueName: \"kubernetes.io/projected/fd86e220-6b63-49cd-bd8c-58a22c39ba68-kube-api-access-t8f9v\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"fd86e220-6b63-49cd-bd8c-58a22c39ba68\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:48:44 crc kubenswrapper[4711]: I0123 08:48:44.030545 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd86e220-6b63-49cd-bd8c-58a22c39ba68-config-data\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"fd86e220-6b63-49cd-bd8c-58a22c39ba68\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:48:44 crc kubenswrapper[4711]: I0123 08:48:44.132633 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8f9v\" (UniqueName: \"kubernetes.io/projected/fd86e220-6b63-49cd-bd8c-58a22c39ba68-kube-api-access-t8f9v\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"fd86e220-6b63-49cd-bd8c-58a22c39ba68\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:48:44 crc kubenswrapper[4711]: I0123 08:48:44.132719 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd86e220-6b63-49cd-bd8c-58a22c39ba68-config-data\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"fd86e220-6b63-49cd-bd8c-58a22c39ba68\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:48:44 crc kubenswrapper[4711]: I0123 08:48:44.149745 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd86e220-6b63-49cd-bd8c-58a22c39ba68-config-data\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"fd86e220-6b63-49cd-bd8c-58a22c39ba68\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:48:44 crc kubenswrapper[4711]: I0123 08:48:44.151945 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8f9v\" (UniqueName: \"kubernetes.io/projected/fd86e220-6b63-49cd-bd8c-58a22c39ba68-kube-api-access-t8f9v\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"fd86e220-6b63-49cd-bd8c-58a22c39ba68\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:48:44 crc kubenswrapper[4711]: I0123 08:48:44.334809 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:48:44 crc kubenswrapper[4711]: I0123 08:48:44.798279 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:48:44 crc kubenswrapper[4711]: I0123 08:48:44.917636 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" event={"ID":"fd86e220-6b63-49cd-bd8c-58a22c39ba68","Type":"ContainerStarted","Data":"e30045e02ec5be8c3cc75118be47b9250acf8e96c43a9536aae950a928364038"} Jan 23 08:48:45 crc kubenswrapper[4711]: I0123 08:48:45.213077 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs" Jan 23 08:48:45 crc kubenswrapper[4711]: I0123 08:48:45.351462 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b4b00a4-3b27-43b4-9d01-bc25c89a1176-scripts\") pod \"5b4b00a4-3b27-43b4-9d01-bc25c89a1176\" (UID: \"5b4b00a4-3b27-43b4-9d01-bc25c89a1176\") " Jan 23 08:48:45 crc kubenswrapper[4711]: I0123 08:48:45.351599 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b4b00a4-3b27-43b4-9d01-bc25c89a1176-config-data\") pod \"5b4b00a4-3b27-43b4-9d01-bc25c89a1176\" (UID: \"5b4b00a4-3b27-43b4-9d01-bc25c89a1176\") " Jan 23 08:48:45 crc kubenswrapper[4711]: I0123 08:48:45.351627 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbpcf\" (UniqueName: \"kubernetes.io/projected/5b4b00a4-3b27-43b4-9d01-bc25c89a1176-kube-api-access-lbpcf\") pod \"5b4b00a4-3b27-43b4-9d01-bc25c89a1176\" (UID: \"5b4b00a4-3b27-43b4-9d01-bc25c89a1176\") " Jan 23 08:48:45 crc kubenswrapper[4711]: I0123 08:48:45.370588 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b4b00a4-3b27-43b4-9d01-bc25c89a1176-scripts" (OuterVolumeSpecName: "scripts") pod "5b4b00a4-3b27-43b4-9d01-bc25c89a1176" (UID: "5b4b00a4-3b27-43b4-9d01-bc25c89a1176"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:48:45 crc kubenswrapper[4711]: I0123 08:48:45.370629 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b4b00a4-3b27-43b4-9d01-bc25c89a1176-kube-api-access-lbpcf" (OuterVolumeSpecName: "kube-api-access-lbpcf") pod "5b4b00a4-3b27-43b4-9d01-bc25c89a1176" (UID: "5b4b00a4-3b27-43b4-9d01-bc25c89a1176"). InnerVolumeSpecName "kube-api-access-lbpcf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:48:45 crc kubenswrapper[4711]: I0123 08:48:45.376261 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b4b00a4-3b27-43b4-9d01-bc25c89a1176-config-data" (OuterVolumeSpecName: "config-data") pod "5b4b00a4-3b27-43b4-9d01-bc25c89a1176" (UID: "5b4b00a4-3b27-43b4-9d01-bc25c89a1176"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:48:45 crc kubenswrapper[4711]: I0123 08:48:45.453208 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbpcf\" (UniqueName: \"kubernetes.io/projected/5b4b00a4-3b27-43b4-9d01-bc25c89a1176-kube-api-access-lbpcf\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:45 crc kubenswrapper[4711]: I0123 08:48:45.453251 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b4b00a4-3b27-43b4-9d01-bc25c89a1176-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:45 crc kubenswrapper[4711]: I0123 08:48:45.453270 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b4b00a4-3b27-43b4-9d01-bc25c89a1176-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:45 crc kubenswrapper[4711]: I0123 08:48:45.927206 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs" event={"ID":"5b4b00a4-3b27-43b4-9d01-bc25c89a1176","Type":"ContainerDied","Data":"0762f3f50df508c4d121556e347e6f61c5b467ffe75a14df9aea60013e9d2b80"} Jan 23 08:48:45 crc kubenswrapper[4711]: I0123 08:48:45.928440 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0762f3f50df508c4d121556e347e6f61c5b467ffe75a14df9aea60013e9d2b80" Jan 23 08:48:45 crc kubenswrapper[4711]: I0123 08:48:45.927225 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs" Jan 23 08:48:45 crc kubenswrapper[4711]: I0123 08:48:45.929421 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" event={"ID":"fd86e220-6b63-49cd-bd8c-58a22c39ba68","Type":"ContainerStarted","Data":"eeebacbb91ec4f572f65d66727699439fcf5389db8187a13a3a909c9c4f56f17"} Jan 23 08:48:45 crc kubenswrapper[4711]: I0123 08:48:45.929564 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:48:45 crc kubenswrapper[4711]: I0123 08:48:45.955399 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" podStartSLOduration=2.955379742 podStartE2EDuration="2.955379742s" podCreationTimestamp="2026-01-23 08:48:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:48:45.947820666 +0000 UTC m=+1711.520777044" watchObservedRunningTime="2026-01-23 08:48:45.955379742 +0000 UTC m=+1711.528336110" Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.116952 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.117418 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c" containerName="nova-kuttl-api-log" containerID="cri-o://6196e40c5db7abf2849bee7b7980920f854802b927dec1aec3f7d074ce6beecc" gracePeriod=30 Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.117598 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c" containerName="nova-kuttl-api-api" 
containerID="cri-o://4d5005573bf7dca20b83c624512bcf46ad3791229cd7afd73f0323869d5cd869" gracePeriod=30 Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.176618 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.176871 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podUID="4ae11979-6934-4330-9924-cc0ac6ac8196" containerName="nova-kuttl-scheduler-scheduler" containerID="cri-o://ca0664314fe5db3454d0eaa2fee031b8d66702f1a5480792972bd14d0c5f4a2f" gracePeriod=30 Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.201052 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.201260 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="5f83bb15-6548-4253-bdd5-9cb9f8f936b1" containerName="nova-kuttl-metadata-log" containerID="cri-o://4229761c1a62101600dbe3f99a3a00ea6bcfb9cfba86584b70d5c899aea848b1" gracePeriod=30 Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.201374 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="5f83bb15-6548-4253-bdd5-9cb9f8f936b1" containerName="nova-kuttl-metadata-metadata" containerID="cri-o://7899c9fc749b10240aeef4dea0ed55882382eebe770328e5a1e6affc3ef35b9d" gracePeriod=30 Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.649896 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.676813 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c-logs\") pod \"c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c\" (UID: \"c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c\") " Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.676929 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c-config-data\") pod \"c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c\" (UID: \"c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c\") " Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.677101 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8b8g\" (UniqueName: \"kubernetes.io/projected/c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c-kube-api-access-z8b8g\") pod \"c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c\" (UID: \"c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c\") " Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.677341 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c-logs" (OuterVolumeSpecName: "logs") pod "c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c" (UID: "c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.677678 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.682827 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c-kube-api-access-z8b8g" (OuterVolumeSpecName: "kube-api-access-z8b8g") pod "c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c" (UID: "c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c"). InnerVolumeSpecName "kube-api-access-z8b8g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.705452 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c-config-data" (OuterVolumeSpecName: "config-data") pod "c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c" (UID: "c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.725683 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.778195 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fk27j\" (UniqueName: \"kubernetes.io/projected/5f83bb15-6548-4253-bdd5-9cb9f8f936b1-kube-api-access-fk27j\") pod \"5f83bb15-6548-4253-bdd5-9cb9f8f936b1\" (UID: \"5f83bb15-6548-4253-bdd5-9cb9f8f936b1\") " Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.778360 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f83bb15-6548-4253-bdd5-9cb9f8f936b1-logs\") pod \"5f83bb15-6548-4253-bdd5-9cb9f8f936b1\" (UID: \"5f83bb15-6548-4253-bdd5-9cb9f8f936b1\") " Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.778490 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f83bb15-6548-4253-bdd5-9cb9f8f936b1-config-data\") pod \"5f83bb15-6548-4253-bdd5-9cb9f8f936b1\" (UID: \"5f83bb15-6548-4253-bdd5-9cb9f8f936b1\") " Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.778906 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8b8g\" (UniqueName: \"kubernetes.io/projected/c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c-kube-api-access-z8b8g\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.778926 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.779080 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f83bb15-6548-4253-bdd5-9cb9f8f936b1-logs" (OuterVolumeSpecName: "logs") pod "5f83bb15-6548-4253-bdd5-9cb9f8f936b1" (UID: "5f83bb15-6548-4253-bdd5-9cb9f8f936b1"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.783087 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f83bb15-6548-4253-bdd5-9cb9f8f936b1-kube-api-access-fk27j" (OuterVolumeSpecName: "kube-api-access-fk27j") pod "5f83bb15-6548-4253-bdd5-9cb9f8f936b1" (UID: "5f83bb15-6548-4253-bdd5-9cb9f8f936b1"). InnerVolumeSpecName "kube-api-access-fk27j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.799903 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f83bb15-6548-4253-bdd5-9cb9f8f936b1-config-data" (OuterVolumeSpecName: "config-data") pod "5f83bb15-6548-4253-bdd5-9cb9f8f936b1" (UID: "5f83bb15-6548-4253-bdd5-9cb9f8f936b1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.879741 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f83bb15-6548-4253-bdd5-9cb9f8f936b1-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.879780 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fk27j\" (UniqueName: \"kubernetes.io/projected/5f83bb15-6548-4253-bdd5-9cb9f8f936b1-kube-api-access-fk27j\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.879793 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f83bb15-6548-4253-bdd5-9cb9f8f936b1-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.939349 4711 generic.go:334] "Generic (PLEG): container finished" podID="5f83bb15-6548-4253-bdd5-9cb9f8f936b1" containerID="7899c9fc749b10240aeef4dea0ed55882382eebe770328e5a1e6affc3ef35b9d" exitCode=0 Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.939387 4711 generic.go:334] "Generic (PLEG): container finished" podID="5f83bb15-6548-4253-bdd5-9cb9f8f936b1" containerID="4229761c1a62101600dbe3f99a3a00ea6bcfb9cfba86584b70d5c899aea848b1" exitCode=143 Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.939433 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.940545 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"5f83bb15-6548-4253-bdd5-9cb9f8f936b1","Type":"ContainerDied","Data":"7899c9fc749b10240aeef4dea0ed55882382eebe770328e5a1e6affc3ef35b9d"} Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.940681 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"5f83bb15-6548-4253-bdd5-9cb9f8f936b1","Type":"ContainerDied","Data":"4229761c1a62101600dbe3f99a3a00ea6bcfb9cfba86584b70d5c899aea848b1"} Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.940797 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"5f83bb15-6548-4253-bdd5-9cb9f8f936b1","Type":"ContainerDied","Data":"0eb49b2a731afbeb319af5a5ee43fba4dd6d617698a961f2f265edbf2a0fe9a1"} Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.940772 4711 scope.go:117] "RemoveContainer" containerID="7899c9fc749b10240aeef4dea0ed55882382eebe770328e5a1e6affc3ef35b9d" Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.941367 4711 generic.go:334] "Generic (PLEG): container finished" podID="c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c" containerID="4d5005573bf7dca20b83c624512bcf46ad3791229cd7afd73f0323869d5cd869" exitCode=0 Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.941410 4711 generic.go:334] "Generic (PLEG): container finished" podID="c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c" containerID="6196e40c5db7abf2849bee7b7980920f854802b927dec1aec3f7d074ce6beecc" exitCode=143 Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.941428 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c","Type":"ContainerDied","Data":"4d5005573bf7dca20b83c624512bcf46ad3791229cd7afd73f0323869d5cd869"} Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.941459 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c","Type":"ContainerDied","Data":"6196e40c5db7abf2849bee7b7980920f854802b927dec1aec3f7d074ce6beecc"} Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.941473 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c","Type":"ContainerDied","Data":"fb71335dd9cbe2c9d729c325e37dcdec4eddf9350f44400380d9d32106a9bad0"} Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.941713 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:46 crc kubenswrapper[4711]: I0123 08:48:46.991651 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.002399 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.003690 4711 scope.go:117] "RemoveContainer" containerID="4229761c1a62101600dbe3f99a3a00ea6bcfb9cfba86584b70d5c899aea848b1" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.010077 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.021918 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.035613 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:48:47 crc kubenswrapper[4711]: E0123 08:48:47.036070 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c" containerName="nova-kuttl-api-log" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.036093 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c" containerName="nova-kuttl-api-log" Jan 23 08:48:47 crc kubenswrapper[4711]: E0123 08:48:47.036109 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b4b00a4-3b27-43b4-9d01-bc25c89a1176" containerName="nova-manage" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.036119 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b4b00a4-3b27-43b4-9d01-bc25c89a1176" containerName="nova-manage" Jan 23 08:48:47 crc kubenswrapper[4711]: E0123 08:48:47.036151 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f83bb15-6548-4253-bdd5-9cb9f8f936b1" containerName="nova-kuttl-metadata-metadata" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.036160 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f83bb15-6548-4253-bdd5-9cb9f8f936b1" containerName="nova-kuttl-metadata-metadata" Jan 23 08:48:47 crc kubenswrapper[4711]: E0123 08:48:47.036174 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c" containerName="nova-kuttl-api-api" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.036183 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c" containerName="nova-kuttl-api-api" Jan 23 08:48:47 crc kubenswrapper[4711]: E0123 08:48:47.036193 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f83bb15-6548-4253-bdd5-9cb9f8f936b1" containerName="nova-kuttl-metadata-log" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.036202 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f83bb15-6548-4253-bdd5-9cb9f8f936b1" containerName="nova-kuttl-metadata-log" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.036421 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f83bb15-6548-4253-bdd5-9cb9f8f936b1" containerName="nova-kuttl-metadata-log" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.036439 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f83bb15-6548-4253-bdd5-9cb9f8f936b1" containerName="nova-kuttl-metadata-metadata" Jan 23 08:48:47 crc 
kubenswrapper[4711]: I0123 08:48:47.036456 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b4b00a4-3b27-43b4-9d01-bc25c89a1176" containerName="nova-manage" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.036467 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c" containerName="nova-kuttl-api-log" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.036475 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c" containerName="nova-kuttl-api-api" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.037564 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.041263 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-metadata-config-data" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.044748 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.046004 4711 scope.go:117] "RemoveContainer" containerID="7899c9fc749b10240aeef4dea0ed55882382eebe770328e5a1e6affc3ef35b9d" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.054777 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.056560 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:47 crc kubenswrapper[4711]: E0123 08:48:47.056981 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7899c9fc749b10240aeef4dea0ed55882382eebe770328e5a1e6affc3ef35b9d\": container with ID starting with 7899c9fc749b10240aeef4dea0ed55882382eebe770328e5a1e6affc3ef35b9d not found: ID does not exist" containerID="7899c9fc749b10240aeef4dea0ed55882382eebe770328e5a1e6affc3ef35b9d" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.057019 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7899c9fc749b10240aeef4dea0ed55882382eebe770328e5a1e6affc3ef35b9d"} err="failed to get container status \"7899c9fc749b10240aeef4dea0ed55882382eebe770328e5a1e6affc3ef35b9d\": rpc error: code = NotFound desc = could not find container \"7899c9fc749b10240aeef4dea0ed55882382eebe770328e5a1e6affc3ef35b9d\": container with ID starting with 7899c9fc749b10240aeef4dea0ed55882382eebe770328e5a1e6affc3ef35b9d not found: ID does not exist" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.057041 4711 scope.go:117] "RemoveContainer" containerID="4229761c1a62101600dbe3f99a3a00ea6bcfb9cfba86584b70d5c899aea848b1" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.058742 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-api-config-data" Jan 23 08:48:47 crc kubenswrapper[4711]: E0123 08:48:47.058737 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4229761c1a62101600dbe3f99a3a00ea6bcfb9cfba86584b70d5c899aea848b1\": container with ID starting with 4229761c1a62101600dbe3f99a3a00ea6bcfb9cfba86584b70d5c899aea848b1 not found: ID does not exist" containerID="4229761c1a62101600dbe3f99a3a00ea6bcfb9cfba86584b70d5c899aea848b1" Jan 23 08:48:47 crc 
kubenswrapper[4711]: I0123 08:48:47.058979 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4229761c1a62101600dbe3f99a3a00ea6bcfb9cfba86584b70d5c899aea848b1"} err="failed to get container status \"4229761c1a62101600dbe3f99a3a00ea6bcfb9cfba86584b70d5c899aea848b1\": rpc error: code = NotFound desc = could not find container \"4229761c1a62101600dbe3f99a3a00ea6bcfb9cfba86584b70d5c899aea848b1\": container with ID starting with 4229761c1a62101600dbe3f99a3a00ea6bcfb9cfba86584b70d5c899aea848b1 not found: ID does not exist" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.059213 4711 scope.go:117] "RemoveContainer" containerID="7899c9fc749b10240aeef4dea0ed55882382eebe770328e5a1e6affc3ef35b9d" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.063721 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.063832 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7899c9fc749b10240aeef4dea0ed55882382eebe770328e5a1e6affc3ef35b9d"} err="failed to get container status \"7899c9fc749b10240aeef4dea0ed55882382eebe770328e5a1e6affc3ef35b9d\": rpc error: code = NotFound desc = could not find container \"7899c9fc749b10240aeef4dea0ed55882382eebe770328e5a1e6affc3ef35b9d\": container with ID starting with 7899c9fc749b10240aeef4dea0ed55882382eebe770328e5a1e6affc3ef35b9d not found: ID does not exist" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.063862 4711 scope.go:117] "RemoveContainer" containerID="4229761c1a62101600dbe3f99a3a00ea6bcfb9cfba86584b70d5c899aea848b1" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.067003 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4229761c1a62101600dbe3f99a3a00ea6bcfb9cfba86584b70d5c899aea848b1"} err="failed to get container status \"4229761c1a62101600dbe3f99a3a00ea6bcfb9cfba86584b70d5c899aea848b1\": rpc error: code = NotFound desc = could not find container \"4229761c1a62101600dbe3f99a3a00ea6bcfb9cfba86584b70d5c899aea848b1\": container with ID starting with 4229761c1a62101600dbe3f99a3a00ea6bcfb9cfba86584b70d5c899aea848b1 not found: ID does not exist" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.067032 4711 scope.go:117] "RemoveContainer" containerID="4d5005573bf7dca20b83c624512bcf46ad3791229cd7afd73f0323869d5cd869" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.088276 4711 scope.go:117] "RemoveContainer" containerID="6196e40c5db7abf2849bee7b7980920f854802b927dec1aec3f7d074ce6beecc" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.089347 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa9d37bb-720a-4bd5-b741-a3a27402d4e3-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"fa9d37bb-720a-4bd5-b741-a3a27402d4e3\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.089824 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa9d37bb-720a-4bd5-b741-a3a27402d4e3-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"fa9d37bb-720a-4bd5-b741-a3a27402d4e3\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.090614 4711 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fth4k\" (UniqueName: \"kubernetes.io/projected/fa9d37bb-720a-4bd5-b741-a3a27402d4e3-kube-api-access-fth4k\") pod \"nova-kuttl-metadata-0\" (UID: \"fa9d37bb-720a-4bd5-b741-a3a27402d4e3\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.107606 4711 scope.go:117] "RemoveContainer" containerID="4d5005573bf7dca20b83c624512bcf46ad3791229cd7afd73f0323869d5cd869" Jan 23 08:48:47 crc kubenswrapper[4711]: E0123 08:48:47.108066 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d5005573bf7dca20b83c624512bcf46ad3791229cd7afd73f0323869d5cd869\": container with ID starting with 4d5005573bf7dca20b83c624512bcf46ad3791229cd7afd73f0323869d5cd869 not found: ID does not exist" containerID="4d5005573bf7dca20b83c624512bcf46ad3791229cd7afd73f0323869d5cd869" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.108113 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d5005573bf7dca20b83c624512bcf46ad3791229cd7afd73f0323869d5cd869"} err="failed to get container status \"4d5005573bf7dca20b83c624512bcf46ad3791229cd7afd73f0323869d5cd869\": rpc error: code = NotFound desc = could not find container \"4d5005573bf7dca20b83c624512bcf46ad3791229cd7afd73f0323869d5cd869\": container with ID starting with 4d5005573bf7dca20b83c624512bcf46ad3791229cd7afd73f0323869d5cd869 not found: ID does not exist" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.108140 4711 scope.go:117] "RemoveContainer" containerID="6196e40c5db7abf2849bee7b7980920f854802b927dec1aec3f7d074ce6beecc" Jan 23 08:48:47 crc kubenswrapper[4711]: E0123 08:48:47.108921 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6196e40c5db7abf2849bee7b7980920f854802b927dec1aec3f7d074ce6beecc\": container with ID starting with 6196e40c5db7abf2849bee7b7980920f854802b927dec1aec3f7d074ce6beecc not found: ID does not exist" containerID="6196e40c5db7abf2849bee7b7980920f854802b927dec1aec3f7d074ce6beecc" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.108955 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6196e40c5db7abf2849bee7b7980920f854802b927dec1aec3f7d074ce6beecc"} err="failed to get container status \"6196e40c5db7abf2849bee7b7980920f854802b927dec1aec3f7d074ce6beecc\": rpc error: code = NotFound desc = could not find container \"6196e40c5db7abf2849bee7b7980920f854802b927dec1aec3f7d074ce6beecc\": container with ID starting with 6196e40c5db7abf2849bee7b7980920f854802b927dec1aec3f7d074ce6beecc not found: ID does not exist" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.108976 4711 scope.go:117] "RemoveContainer" containerID="4d5005573bf7dca20b83c624512bcf46ad3791229cd7afd73f0323869d5cd869" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.109207 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d5005573bf7dca20b83c624512bcf46ad3791229cd7afd73f0323869d5cd869"} err="failed to get container status \"4d5005573bf7dca20b83c624512bcf46ad3791229cd7afd73f0323869d5cd869\": rpc error: code = NotFound desc = could not find container \"4d5005573bf7dca20b83c624512bcf46ad3791229cd7afd73f0323869d5cd869\": container with ID starting with 4d5005573bf7dca20b83c624512bcf46ad3791229cd7afd73f0323869d5cd869 not found: ID 
does not exist" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.109230 4711 scope.go:117] "RemoveContainer" containerID="6196e40c5db7abf2849bee7b7980920f854802b927dec1aec3f7d074ce6beecc" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.109484 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6196e40c5db7abf2849bee7b7980920f854802b927dec1aec3f7d074ce6beecc"} err="failed to get container status \"6196e40c5db7abf2849bee7b7980920f854802b927dec1aec3f7d074ce6beecc\": rpc error: code = NotFound desc = could not find container \"6196e40c5db7abf2849bee7b7980920f854802b927dec1aec3f7d074ce6beecc\": container with ID starting with 6196e40c5db7abf2849bee7b7980920f854802b927dec1aec3f7d074ce6beecc not found: ID does not exist" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.191670 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8596dc4-07c1-4665-9e11-307a245c7555-config-data\") pod \"nova-kuttl-api-0\" (UID: \"e8596dc4-07c1-4665-9e11-307a245c7555\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.191742 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa9d37bb-720a-4bd5-b741-a3a27402d4e3-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"fa9d37bb-720a-4bd5-b741-a3a27402d4e3\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.191772 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8596dc4-07c1-4665-9e11-307a245c7555-logs\") pod \"nova-kuttl-api-0\" (UID: \"e8596dc4-07c1-4665-9e11-307a245c7555\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.191826 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pv257\" (UniqueName: \"kubernetes.io/projected/e8596dc4-07c1-4665-9e11-307a245c7555-kube-api-access-pv257\") pod \"nova-kuttl-api-0\" (UID: \"e8596dc4-07c1-4665-9e11-307a245c7555\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.191891 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fth4k\" (UniqueName: \"kubernetes.io/projected/fa9d37bb-720a-4bd5-b741-a3a27402d4e3-kube-api-access-fth4k\") pod \"nova-kuttl-metadata-0\" (UID: \"fa9d37bb-720a-4bd5-b741-a3a27402d4e3\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.192019 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa9d37bb-720a-4bd5-b741-a3a27402d4e3-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"fa9d37bb-720a-4bd5-b741-a3a27402d4e3\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.192454 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa9d37bb-720a-4bd5-b741-a3a27402d4e3-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"fa9d37bb-720a-4bd5-b741-a3a27402d4e3\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.196128 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa9d37bb-720a-4bd5-b741-a3a27402d4e3-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"fa9d37bb-720a-4bd5-b741-a3a27402d4e3\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.211135 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fth4k\" (UniqueName: \"kubernetes.io/projected/fa9d37bb-720a-4bd5-b741-a3a27402d4e3-kube-api-access-fth4k\") pod \"nova-kuttl-metadata-0\" (UID: \"fa9d37bb-720a-4bd5-b741-a3a27402d4e3\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.293466 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8596dc4-07c1-4665-9e11-307a245c7555-config-data\") pod \"nova-kuttl-api-0\" (UID: \"e8596dc4-07c1-4665-9e11-307a245c7555\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.293588 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8596dc4-07c1-4665-9e11-307a245c7555-logs\") pod \"nova-kuttl-api-0\" (UID: \"e8596dc4-07c1-4665-9e11-307a245c7555\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.293638 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pv257\" (UniqueName: \"kubernetes.io/projected/e8596dc4-07c1-4665-9e11-307a245c7555-kube-api-access-pv257\") pod \"nova-kuttl-api-0\" (UID: \"e8596dc4-07c1-4665-9e11-307a245c7555\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.294395 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8596dc4-07c1-4665-9e11-307a245c7555-logs\") pod \"nova-kuttl-api-0\" (UID: \"e8596dc4-07c1-4665-9e11-307a245c7555\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.298536 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8596dc4-07c1-4665-9e11-307a245c7555-config-data\") pod \"nova-kuttl-api-0\" (UID: \"e8596dc4-07c1-4665-9e11-307a245c7555\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.314348 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pv257\" (UniqueName: \"kubernetes.io/projected/e8596dc4-07c1-4665-9e11-307a245c7555-kube-api-access-pv257\") pod \"nova-kuttl-api-0\" (UID: \"e8596dc4-07c1-4665-9e11-307a245c7555\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.360098 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.378692 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.390197 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.391743 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.496820 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f83bb15-6548-4253-bdd5-9cb9f8f936b1" path="/var/lib/kubelet/pods/5f83bb15-6548-4253-bdd5-9cb9f8f936b1/volumes" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.501003 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c" path="/var/lib/kubelet/pods/c9c3759e-88e1-4cbc-8c5c-d6b55625dc5c/volumes" Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.810956 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:48:47 crc kubenswrapper[4711]: W0123 08:48:47.824783 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa9d37bb_720a_4bd5_b741_a3a27402d4e3.slice/crio-780e4c4acd7b9ebdfe09b21d65cd610d9c3a33427eebbc832b86265abfb99282 WatchSource:0}: Error finding container 780e4c4acd7b9ebdfe09b21d65cd610d9c3a33427eebbc832b86265abfb99282: Status 404 returned error can't find the container with id 780e4c4acd7b9ebdfe09b21d65cd610d9c3a33427eebbc832b86265abfb99282 Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.898956 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.953441 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"fa9d37bb-720a-4bd5-b741-a3a27402d4e3","Type":"ContainerStarted","Data":"780e4c4acd7b9ebdfe09b21d65cd610d9c3a33427eebbc832b86265abfb99282"} Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.954515 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"e8596dc4-07c1-4665-9e11-307a245c7555","Type":"ContainerStarted","Data":"37866cf797c83f80e0342bc2b88b8070688d690f17dc96cfdac36c7ab0c1c2a7"} Jan 23 08:48:47 crc kubenswrapper[4711]: I0123 08:48:47.969723 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:48:48 crc kubenswrapper[4711]: I0123 08:48:48.981482 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"fa9d37bb-720a-4bd5-b741-a3a27402d4e3","Type":"ContainerStarted","Data":"46576c1007aa3bd33140e11e182c6dbcab2913ac7e8616fa1001616ac892a64f"} Jan 23 08:48:48 crc kubenswrapper[4711]: I0123 08:48:48.981880 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"fa9d37bb-720a-4bd5-b741-a3a27402d4e3","Type":"ContainerStarted","Data":"1d10c01ced90ab15bb0de662deb017d87f0fa9040a866ee0c0a43bdf18c3fb36"} Jan 23 08:48:48 crc kubenswrapper[4711]: I0123 08:48:48.983626 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"e8596dc4-07c1-4665-9e11-307a245c7555","Type":"ContainerStarted","Data":"e914e648d155cfa7de640deef5e95ee9a47281abdab316fa2e5c1c54adc4f9d5"} Jan 23 08:48:48 crc kubenswrapper[4711]: I0123 08:48:48.983660 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" 
event={"ID":"e8596dc4-07c1-4665-9e11-307a245c7555","Type":"ContainerStarted","Data":"26fde45806ee1a2fc7a5f70afc681ec09d033023bd9d21dc40299a527d06cee2"} Jan 23 08:48:49 crc kubenswrapper[4711]: I0123 08:48:49.002857 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-metadata-0" podStartSLOduration=3.002835018 podStartE2EDuration="3.002835018s" podCreationTimestamp="2026-01-23 08:48:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:48:48.999960318 +0000 UTC m=+1714.572916686" watchObservedRunningTime="2026-01-23 08:48:49.002835018 +0000 UTC m=+1714.575791386" Jan 23 08:48:49 crc kubenswrapper[4711]: I0123 08:48:49.020809 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-api-0" podStartSLOduration=3.020788348 podStartE2EDuration="3.020788348s" podCreationTimestamp="2026-01-23 08:48:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:48:49.01722084 +0000 UTC m=+1714.590177228" watchObservedRunningTime="2026-01-23 08:48:49.020788348 +0000 UTC m=+1714.593744706" Jan 23 08:48:49 crc kubenswrapper[4711]: I0123 08:48:49.360156 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:48:49 crc kubenswrapper[4711]: I0123 08:48:49.776769 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt"] Jan 23 08:48:49 crc kubenswrapper[4711]: I0123 08:48:49.777854 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt" Jan 23 08:48:49 crc kubenswrapper[4711]: I0123 08:48:49.780186 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-manage-scripts" Jan 23 08:48:49 crc kubenswrapper[4711]: I0123 08:48:49.781078 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-manage-config-data" Jan 23 08:48:49 crc kubenswrapper[4711]: I0123 08:48:49.788551 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt"] Jan 23 08:48:49 crc kubenswrapper[4711]: I0123 08:48:49.933716 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pg96n\" (UniqueName: \"kubernetes.io/projected/ee376276-2fe0-47ce-9002-eae63b9efea2-kube-api-access-pg96n\") pod \"nova-kuttl-cell1-cell-mapping-648xt\" (UID: \"ee376276-2fe0-47ce-9002-eae63b9efea2\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt" Jan 23 08:48:49 crc kubenswrapper[4711]: I0123 08:48:49.933783 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee376276-2fe0-47ce-9002-eae63b9efea2-config-data\") pod \"nova-kuttl-cell1-cell-mapping-648xt\" (UID: \"ee376276-2fe0-47ce-9002-eae63b9efea2\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt" Jan 23 08:48:49 crc kubenswrapper[4711]: I0123 08:48:49.933854 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee376276-2fe0-47ce-9002-eae63b9efea2-scripts\") pod 
\"nova-kuttl-cell1-cell-mapping-648xt\" (UID: \"ee376276-2fe0-47ce-9002-eae63b9efea2\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt" Jan 23 08:48:50 crc kubenswrapper[4711]: I0123 08:48:50.035364 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee376276-2fe0-47ce-9002-eae63b9efea2-config-data\") pod \"nova-kuttl-cell1-cell-mapping-648xt\" (UID: \"ee376276-2fe0-47ce-9002-eae63b9efea2\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt" Jan 23 08:48:50 crc kubenswrapper[4711]: I0123 08:48:50.036152 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee376276-2fe0-47ce-9002-eae63b9efea2-scripts\") pod \"nova-kuttl-cell1-cell-mapping-648xt\" (UID: \"ee376276-2fe0-47ce-9002-eae63b9efea2\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt" Jan 23 08:48:50 crc kubenswrapper[4711]: I0123 08:48:50.036492 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pg96n\" (UniqueName: \"kubernetes.io/projected/ee376276-2fe0-47ce-9002-eae63b9efea2-kube-api-access-pg96n\") pod \"nova-kuttl-cell1-cell-mapping-648xt\" (UID: \"ee376276-2fe0-47ce-9002-eae63b9efea2\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt" Jan 23 08:48:50 crc kubenswrapper[4711]: I0123 08:48:50.043457 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee376276-2fe0-47ce-9002-eae63b9efea2-config-data\") pod \"nova-kuttl-cell1-cell-mapping-648xt\" (UID: \"ee376276-2fe0-47ce-9002-eae63b9efea2\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt" Jan 23 08:48:50 crc kubenswrapper[4711]: I0123 08:48:50.051305 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee376276-2fe0-47ce-9002-eae63b9efea2-scripts\") pod \"nova-kuttl-cell1-cell-mapping-648xt\" (UID: \"ee376276-2fe0-47ce-9002-eae63b9efea2\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt" Jan 23 08:48:50 crc kubenswrapper[4711]: I0123 08:48:50.053209 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pg96n\" (UniqueName: \"kubernetes.io/projected/ee376276-2fe0-47ce-9002-eae63b9efea2-kube-api-access-pg96n\") pod \"nova-kuttl-cell1-cell-mapping-648xt\" (UID: \"ee376276-2fe0-47ce-9002-eae63b9efea2\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt" Jan 23 08:48:50 crc kubenswrapper[4711]: I0123 08:48:50.097550 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt" Jan 23 08:48:50 crc kubenswrapper[4711]: I0123 08:48:50.311190 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:50 crc kubenswrapper[4711]: I0123 08:48:50.443093 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p7bt5\" (UniqueName: \"kubernetes.io/projected/4ae11979-6934-4330-9924-cc0ac6ac8196-kube-api-access-p7bt5\") pod \"4ae11979-6934-4330-9924-cc0ac6ac8196\" (UID: \"4ae11979-6934-4330-9924-cc0ac6ac8196\") " Jan 23 08:48:50 crc kubenswrapper[4711]: I0123 08:48:50.443160 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ae11979-6934-4330-9924-cc0ac6ac8196-config-data\") pod \"4ae11979-6934-4330-9924-cc0ac6ac8196\" (UID: \"4ae11979-6934-4330-9924-cc0ac6ac8196\") " Jan 23 08:48:50 crc kubenswrapper[4711]: I0123 08:48:50.447096 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ae11979-6934-4330-9924-cc0ac6ac8196-kube-api-access-p7bt5" (OuterVolumeSpecName: "kube-api-access-p7bt5") pod "4ae11979-6934-4330-9924-cc0ac6ac8196" (UID: "4ae11979-6934-4330-9924-cc0ac6ac8196"). InnerVolumeSpecName "kube-api-access-p7bt5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:48:50 crc kubenswrapper[4711]: I0123 08:48:50.463256 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ae11979-6934-4330-9924-cc0ac6ac8196-config-data" (OuterVolumeSpecName: "config-data") pod "4ae11979-6934-4330-9924-cc0ac6ac8196" (UID: "4ae11979-6934-4330-9924-cc0ac6ac8196"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:48:50 crc kubenswrapper[4711]: I0123 08:48:50.552946 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p7bt5\" (UniqueName: \"kubernetes.io/projected/4ae11979-6934-4330-9924-cc0ac6ac8196-kube-api-access-p7bt5\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:50 crc kubenswrapper[4711]: I0123 08:48:50.553007 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ae11979-6934-4330-9924-cc0ac6ac8196-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:50 crc kubenswrapper[4711]: W0123 08:48:50.585536 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee376276_2fe0_47ce_9002_eae63b9efea2.slice/crio-3ad87abca092375bfa8e7a6db19e93b58c1ef7c3370c55a2c8ac2a6b5ee4cf22 WatchSource:0}: Error finding container 3ad87abca092375bfa8e7a6db19e93b58c1ef7c3370c55a2c8ac2a6b5ee4cf22: Status 404 returned error can't find the container with id 3ad87abca092375bfa8e7a6db19e93b58c1ef7c3370c55a2c8ac2a6b5ee4cf22 Jan 23 08:48:50 crc kubenswrapper[4711]: I0123 08:48:50.586025 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt"] Jan 23 08:48:50 crc kubenswrapper[4711]: I0123 08:48:50.999177 4711 generic.go:334] "Generic (PLEG): container finished" podID="4ae11979-6934-4330-9924-cc0ac6ac8196" containerID="ca0664314fe5db3454d0eaa2fee031b8d66702f1a5480792972bd14d0c5f4a2f" exitCode=0 Jan 23 08:48:50 crc kubenswrapper[4711]: I0123 08:48:50.999286 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.005573 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"4ae11979-6934-4330-9924-cc0ac6ac8196","Type":"ContainerDied","Data":"ca0664314fe5db3454d0eaa2fee031b8d66702f1a5480792972bd14d0c5f4a2f"} Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.005630 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"4ae11979-6934-4330-9924-cc0ac6ac8196","Type":"ContainerDied","Data":"43ba48523801f1676ba00a7c333669aa448dfbfc390365a7534693f839caf69c"} Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.005654 4711 scope.go:117] "RemoveContainer" containerID="ca0664314fe5db3454d0eaa2fee031b8d66702f1a5480792972bd14d0c5f4a2f" Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.007663 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt" event={"ID":"ee376276-2fe0-47ce-9002-eae63b9efea2","Type":"ContainerStarted","Data":"80771f604e4ec6f268a0dbb6a11e67f206a15dfaeea948a0116c17f0dc4f7f7a"} Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.007709 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt" event={"ID":"ee376276-2fe0-47ce-9002-eae63b9efea2","Type":"ContainerStarted","Data":"3ad87abca092375bfa8e7a6db19e93b58c1ef7c3370c55a2c8ac2a6b5ee4cf22"} Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.024339 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt" podStartSLOduration=2.024317764 podStartE2EDuration="2.024317764s" podCreationTimestamp="2026-01-23 08:48:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:48:51.020356157 +0000 UTC m=+1716.593312545" watchObservedRunningTime="2026-01-23 08:48:51.024317764 +0000 UTC m=+1716.597274132" Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.048545 4711 scope.go:117] "RemoveContainer" containerID="ca0664314fe5db3454d0eaa2fee031b8d66702f1a5480792972bd14d0c5f4a2f" Jan 23 08:48:51 crc kubenswrapper[4711]: E0123 08:48:51.049058 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca0664314fe5db3454d0eaa2fee031b8d66702f1a5480792972bd14d0c5f4a2f\": container with ID starting with ca0664314fe5db3454d0eaa2fee031b8d66702f1a5480792972bd14d0c5f4a2f not found: ID does not exist" containerID="ca0664314fe5db3454d0eaa2fee031b8d66702f1a5480792972bd14d0c5f4a2f" Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.049091 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca0664314fe5db3454d0eaa2fee031b8d66702f1a5480792972bd14d0c5f4a2f"} err="failed to get container status \"ca0664314fe5db3454d0eaa2fee031b8d66702f1a5480792972bd14d0c5f4a2f\": rpc error: code = NotFound desc = could not find container \"ca0664314fe5db3454d0eaa2fee031b8d66702f1a5480792972bd14d0c5f4a2f\": container with ID starting with ca0664314fe5db3454d0eaa2fee031b8d66702f1a5480792972bd14d0c5f4a2f not found: ID does not exist" Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.055365 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:48:51 crc 
kubenswrapper[4711]: I0123 08:48:51.064448 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.080023 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:48:51 crc kubenswrapper[4711]: E0123 08:48:51.080719 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ae11979-6934-4330-9924-cc0ac6ac8196" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.080756 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ae11979-6934-4330-9924-cc0ac6ac8196" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.081053 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ae11979-6934-4330-9924-cc0ac6ac8196" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.081943 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.084493 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-scheduler-config-data" Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.108322 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.162979 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxwzh\" (UniqueName: \"kubernetes.io/projected/45ddb73c-7ce5-4674-8859-b4f7e60f99a0-kube-api-access-hxwzh\") pod \"nova-kuttl-scheduler-0\" (UID: \"45ddb73c-7ce5-4674-8859-b4f7e60f99a0\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.163123 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45ddb73c-7ce5-4674-8859-b4f7e60f99a0-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"45ddb73c-7ce5-4674-8859-b4f7e60f99a0\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.264687 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45ddb73c-7ce5-4674-8859-b4f7e60f99a0-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"45ddb73c-7ce5-4674-8859-b4f7e60f99a0\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.264890 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxwzh\" (UniqueName: \"kubernetes.io/projected/45ddb73c-7ce5-4674-8859-b4f7e60f99a0-kube-api-access-hxwzh\") pod \"nova-kuttl-scheduler-0\" (UID: \"45ddb73c-7ce5-4674-8859-b4f7e60f99a0\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.280082 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45ddb73c-7ce5-4674-8859-b4f7e60f99a0-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"45ddb73c-7ce5-4674-8859-b4f7e60f99a0\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.280376 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hxwzh\" (UniqueName: \"kubernetes.io/projected/45ddb73c-7ce5-4674-8859-b4f7e60f99a0-kube-api-access-hxwzh\") pod \"nova-kuttl-scheduler-0\" (UID: \"45ddb73c-7ce5-4674-8859-b4f7e60f99a0\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.401448 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.485059 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ae11979-6934-4330-9924-cc0ac6ac8196" path="/var/lib/kubelet/pods/4ae11979-6934-4330-9924-cc0ac6ac8196/volumes" Jan 23 08:48:51 crc kubenswrapper[4711]: W0123 08:48:51.828043 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod45ddb73c_7ce5_4674_8859_b4f7e60f99a0.slice/crio-c9c8583c1a1dced52a30823095ea208b933a53b80dc8a68709c251bbfa3a6ce5 WatchSource:0}: Error finding container c9c8583c1a1dced52a30823095ea208b933a53b80dc8a68709c251bbfa3a6ce5: Status 404 returned error can't find the container with id c9c8583c1a1dced52a30823095ea208b933a53b80dc8a68709c251bbfa3a6ce5 Jan 23 08:48:51 crc kubenswrapper[4711]: I0123 08:48:51.833470 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:48:52 crc kubenswrapper[4711]: I0123 08:48:52.020055 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"45ddb73c-7ce5-4674-8859-b4f7e60f99a0","Type":"ContainerStarted","Data":"3f85f6170e9898190c932349c52bfdbae52eb67bfc765bce4ab0112bbe65af02"} Jan 23 08:48:52 crc kubenswrapper[4711]: I0123 08:48:52.020375 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"45ddb73c-7ce5-4674-8859-b4f7e60f99a0","Type":"ContainerStarted","Data":"c9c8583c1a1dced52a30823095ea208b933a53b80dc8a68709c251bbfa3a6ce5"} Jan 23 08:48:52 crc kubenswrapper[4711]: I0123 08:48:52.040471 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podStartSLOduration=1.040453785 podStartE2EDuration="1.040453785s" podCreationTimestamp="2026-01-23 08:48:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:48:52.034652192 +0000 UTC m=+1717.607608560" watchObservedRunningTime="2026-01-23 08:48:52.040453785 +0000 UTC m=+1717.613410153" Jan 23 08:48:52 crc kubenswrapper[4711]: I0123 08:48:52.361842 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:52 crc kubenswrapper[4711]: I0123 08:48:52.361932 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:55 crc kubenswrapper[4711]: I0123 08:48:55.481167 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:48:55 crc kubenswrapper[4711]: E0123 08:48:55.481718 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:48:56 crc kubenswrapper[4711]: I0123 08:48:56.052301 4711 generic.go:334] "Generic (PLEG): container finished" podID="ee376276-2fe0-47ce-9002-eae63b9efea2" containerID="80771f604e4ec6f268a0dbb6a11e67f206a15dfaeea948a0116c17f0dc4f7f7a" exitCode=0 Jan 23 08:48:56 crc kubenswrapper[4711]: I0123 08:48:56.052346 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt" event={"ID":"ee376276-2fe0-47ce-9002-eae63b9efea2","Type":"ContainerDied","Data":"80771f604e4ec6f268a0dbb6a11e67f206a15dfaeea948a0116c17f0dc4f7f7a"} Jan 23 08:48:56 crc kubenswrapper[4711]: I0123 08:48:56.401829 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:57 crc kubenswrapper[4711]: I0123 08:48:57.361456 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:57 crc kubenswrapper[4711]: I0123 08:48:57.361815 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:48:57 crc kubenswrapper[4711]: I0123 08:48:57.390908 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:57 crc kubenswrapper[4711]: I0123 08:48:57.390972 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:48:57 crc kubenswrapper[4711]: I0123 08:48:57.483047 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt" Jan 23 08:48:57 crc kubenswrapper[4711]: I0123 08:48:57.565761 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee376276-2fe0-47ce-9002-eae63b9efea2-scripts\") pod \"ee376276-2fe0-47ce-9002-eae63b9efea2\" (UID: \"ee376276-2fe0-47ce-9002-eae63b9efea2\") " Jan 23 08:48:57 crc kubenswrapper[4711]: I0123 08:48:57.565834 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee376276-2fe0-47ce-9002-eae63b9efea2-config-data\") pod \"ee376276-2fe0-47ce-9002-eae63b9efea2\" (UID: \"ee376276-2fe0-47ce-9002-eae63b9efea2\") " Jan 23 08:48:57 crc kubenswrapper[4711]: I0123 08:48:57.565904 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pg96n\" (UniqueName: \"kubernetes.io/projected/ee376276-2fe0-47ce-9002-eae63b9efea2-kube-api-access-pg96n\") pod \"ee376276-2fe0-47ce-9002-eae63b9efea2\" (UID: \"ee376276-2fe0-47ce-9002-eae63b9efea2\") " Jan 23 08:48:57 crc kubenswrapper[4711]: I0123 08:48:57.570674 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee376276-2fe0-47ce-9002-eae63b9efea2-scripts" (OuterVolumeSpecName: "scripts") pod "ee376276-2fe0-47ce-9002-eae63b9efea2" (UID: "ee376276-2fe0-47ce-9002-eae63b9efea2"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:48:57 crc kubenswrapper[4711]: I0123 08:48:57.571547 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee376276-2fe0-47ce-9002-eae63b9efea2-kube-api-access-pg96n" (OuterVolumeSpecName: "kube-api-access-pg96n") pod "ee376276-2fe0-47ce-9002-eae63b9efea2" (UID: "ee376276-2fe0-47ce-9002-eae63b9efea2"). InnerVolumeSpecName "kube-api-access-pg96n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:48:57 crc kubenswrapper[4711]: I0123 08:48:57.602748 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee376276-2fe0-47ce-9002-eae63b9efea2-config-data" (OuterVolumeSpecName: "config-data") pod "ee376276-2fe0-47ce-9002-eae63b9efea2" (UID: "ee376276-2fe0-47ce-9002-eae63b9efea2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:48:57 crc kubenswrapper[4711]: I0123 08:48:57.668779 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee376276-2fe0-47ce-9002-eae63b9efea2-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:57 crc kubenswrapper[4711]: I0123 08:48:57.668829 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee376276-2fe0-47ce-9002-eae63b9efea2-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:57 crc kubenswrapper[4711]: I0123 08:48:57.668845 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pg96n\" (UniqueName: \"kubernetes.io/projected/ee376276-2fe0-47ce-9002-eae63b9efea2-kube-api-access-pg96n\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:58 crc kubenswrapper[4711]: I0123 08:48:58.073190 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt" event={"ID":"ee376276-2fe0-47ce-9002-eae63b9efea2","Type":"ContainerDied","Data":"3ad87abca092375bfa8e7a6db19e93b58c1ef7c3370c55a2c8ac2a6b5ee4cf22"} Jan 23 08:48:58 crc kubenswrapper[4711]: I0123 08:48:58.073547 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ad87abca092375bfa8e7a6db19e93b58c1ef7c3370c55a2c8ac2a6b5ee4cf22" Jan 23 08:48:58 crc kubenswrapper[4711]: I0123 08:48:58.073224 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt" Jan 23 08:48:58 crc kubenswrapper[4711]: I0123 08:48:58.372860 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:48:58 crc kubenswrapper[4711]: I0123 08:48:58.373132 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="e8596dc4-07c1-4665-9e11-307a245c7555" containerName="nova-kuttl-api-log" containerID="cri-o://26fde45806ee1a2fc7a5f70afc681ec09d033023bd9d21dc40299a527d06cee2" gracePeriod=30 Jan 23 08:48:58 crc kubenswrapper[4711]: I0123 08:48:58.373291 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="e8596dc4-07c1-4665-9e11-307a245c7555" containerName="nova-kuttl-api-api" containerID="cri-o://e914e648d155cfa7de640deef5e95ee9a47281abdab316fa2e5c1c54adc4f9d5" gracePeriod=30 Jan 23 08:48:58 crc kubenswrapper[4711]: I0123 08:48:58.401616 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:48:58 crc kubenswrapper[4711]: I0123 08:48:58.401864 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podUID="45ddb73c-7ce5-4674-8859-b4f7e60f99a0" containerName="nova-kuttl-scheduler-scheduler" containerID="cri-o://3f85f6170e9898190c932349c52bfdbae52eb67bfc765bce4ab0112bbe65af02" gracePeriod=30 Jan 23 08:48:58 crc kubenswrapper[4711]: I0123 08:48:58.443767 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="fa9d37bb-720a-4bd5-b741-a3a27402d4e3" containerName="nova-kuttl-metadata-log" probeResult="failure" output="Get \"http://10.217.0.165:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:48:58 crc kubenswrapper[4711]: I0123 08:48:58.443818 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="e8596dc4-07c1-4665-9e11-307a245c7555" containerName="nova-kuttl-api-api" probeResult="failure" output="Get \"http://10.217.0.166:8774/\": EOF (Client.Timeout exceeded while awaiting headers)" Jan 23 08:48:58 crc kubenswrapper[4711]: I0123 08:48:58.443942 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="e8596dc4-07c1-4665-9e11-307a245c7555" containerName="nova-kuttl-api-log" probeResult="failure" output="Get \"http://10.217.0.166:8774/\": EOF (Client.Timeout exceeded while awaiting headers)" Jan 23 08:48:58 crc kubenswrapper[4711]: I0123 08:48:58.444059 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="fa9d37bb-720a-4bd5-b741-a3a27402d4e3" containerName="nova-kuttl-metadata-metadata" probeResult="failure" output="Get \"http://10.217.0.165:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:48:58 crc kubenswrapper[4711]: I0123 08:48:58.470963 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:48:58 crc kubenswrapper[4711]: I0123 08:48:58.471246 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="fa9d37bb-720a-4bd5-b741-a3a27402d4e3" containerName="nova-kuttl-metadata-log" containerID="cri-o://1d10c01ced90ab15bb0de662deb017d87f0fa9040a866ee0c0a43bdf18c3fb36" 
gracePeriod=30 Jan 23 08:48:58 crc kubenswrapper[4711]: I0123 08:48:58.471363 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="fa9d37bb-720a-4bd5-b741-a3a27402d4e3" containerName="nova-kuttl-metadata-metadata" containerID="cri-o://46576c1007aa3bd33140e11e182c6dbcab2913ac7e8616fa1001616ac892a64f" gracePeriod=30 Jan 23 08:48:59 crc kubenswrapper[4711]: I0123 08:48:59.084371 4711 generic.go:334] "Generic (PLEG): container finished" podID="fa9d37bb-720a-4bd5-b741-a3a27402d4e3" containerID="1d10c01ced90ab15bb0de662deb017d87f0fa9040a866ee0c0a43bdf18c3fb36" exitCode=143 Jan 23 08:48:59 crc kubenswrapper[4711]: I0123 08:48:59.085066 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"fa9d37bb-720a-4bd5-b741-a3a27402d4e3","Type":"ContainerDied","Data":"1d10c01ced90ab15bb0de662deb017d87f0fa9040a866ee0c0a43bdf18c3fb36"} Jan 23 08:48:59 crc kubenswrapper[4711]: I0123 08:48:59.087939 4711 generic.go:334] "Generic (PLEG): container finished" podID="e8596dc4-07c1-4665-9e11-307a245c7555" containerID="26fde45806ee1a2fc7a5f70afc681ec09d033023bd9d21dc40299a527d06cee2" exitCode=143 Jan 23 08:48:59 crc kubenswrapper[4711]: I0123 08:48:59.088001 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"e8596dc4-07c1-4665-9e11-307a245c7555","Type":"ContainerDied","Data":"26fde45806ee1a2fc7a5f70afc681ec09d033023bd9d21dc40299a527d06cee2"} Jan 23 08:48:59 crc kubenswrapper[4711]: I0123 08:48:59.459397 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:48:59 crc kubenswrapper[4711]: I0123 08:48:59.525825 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45ddb73c-7ce5-4674-8859-b4f7e60f99a0-config-data\") pod \"45ddb73c-7ce5-4674-8859-b4f7e60f99a0\" (UID: \"45ddb73c-7ce5-4674-8859-b4f7e60f99a0\") " Jan 23 08:48:59 crc kubenswrapper[4711]: I0123 08:48:59.526028 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxwzh\" (UniqueName: \"kubernetes.io/projected/45ddb73c-7ce5-4674-8859-b4f7e60f99a0-kube-api-access-hxwzh\") pod \"45ddb73c-7ce5-4674-8859-b4f7e60f99a0\" (UID: \"45ddb73c-7ce5-4674-8859-b4f7e60f99a0\") " Jan 23 08:48:59 crc kubenswrapper[4711]: I0123 08:48:59.534803 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45ddb73c-7ce5-4674-8859-b4f7e60f99a0-kube-api-access-hxwzh" (OuterVolumeSpecName: "kube-api-access-hxwzh") pod "45ddb73c-7ce5-4674-8859-b4f7e60f99a0" (UID: "45ddb73c-7ce5-4674-8859-b4f7e60f99a0"). InnerVolumeSpecName "kube-api-access-hxwzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:48:59 crc kubenswrapper[4711]: I0123 08:48:59.547715 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45ddb73c-7ce5-4674-8859-b4f7e60f99a0-config-data" (OuterVolumeSpecName: "config-data") pod "45ddb73c-7ce5-4674-8859-b4f7e60f99a0" (UID: "45ddb73c-7ce5-4674-8859-b4f7e60f99a0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:48:59 crc kubenswrapper[4711]: I0123 08:48:59.628109 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxwzh\" (UniqueName: \"kubernetes.io/projected/45ddb73c-7ce5-4674-8859-b4f7e60f99a0-kube-api-access-hxwzh\") on node \"crc\" DevicePath \"\"" Jan 23 08:48:59 crc kubenswrapper[4711]: I0123 08:48:59.628156 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45ddb73c-7ce5-4674-8859-b4f7e60f99a0-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.097186 4711 generic.go:334] "Generic (PLEG): container finished" podID="45ddb73c-7ce5-4674-8859-b4f7e60f99a0" containerID="3f85f6170e9898190c932349c52bfdbae52eb67bfc765bce4ab0112bbe65af02" exitCode=0 Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.097241 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"45ddb73c-7ce5-4674-8859-b4f7e60f99a0","Type":"ContainerDied","Data":"3f85f6170e9898190c932349c52bfdbae52eb67bfc765bce4ab0112bbe65af02"} Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.097300 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"45ddb73c-7ce5-4674-8859-b4f7e60f99a0","Type":"ContainerDied","Data":"c9c8583c1a1dced52a30823095ea208b933a53b80dc8a68709c251bbfa3a6ce5"} Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.097323 4711 scope.go:117] "RemoveContainer" containerID="3f85f6170e9898190c932349c52bfdbae52eb67bfc765bce4ab0112bbe65af02" Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.097704 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.120206 4711 scope.go:117] "RemoveContainer" containerID="3f85f6170e9898190c932349c52bfdbae52eb67bfc765bce4ab0112bbe65af02" Jan 23 08:49:00 crc kubenswrapper[4711]: E0123 08:49:00.120741 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f85f6170e9898190c932349c52bfdbae52eb67bfc765bce4ab0112bbe65af02\": container with ID starting with 3f85f6170e9898190c932349c52bfdbae52eb67bfc765bce4ab0112bbe65af02 not found: ID does not exist" containerID="3f85f6170e9898190c932349c52bfdbae52eb67bfc765bce4ab0112bbe65af02" Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.120934 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f85f6170e9898190c932349c52bfdbae52eb67bfc765bce4ab0112bbe65af02"} err="failed to get container status \"3f85f6170e9898190c932349c52bfdbae52eb67bfc765bce4ab0112bbe65af02\": rpc error: code = NotFound desc = could not find container \"3f85f6170e9898190c932349c52bfdbae52eb67bfc765bce4ab0112bbe65af02\": container with ID starting with 3f85f6170e9898190c932349c52bfdbae52eb67bfc765bce4ab0112bbe65af02 not found: ID does not exist" Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.134085 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.141598 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.153531 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:49:00 crc kubenswrapper[4711]: E0123 08:49:00.153999 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee376276-2fe0-47ce-9002-eae63b9efea2" containerName="nova-manage" Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.154026 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee376276-2fe0-47ce-9002-eae63b9efea2" containerName="nova-manage" Jan 23 08:49:00 crc kubenswrapper[4711]: E0123 08:49:00.154049 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45ddb73c-7ce5-4674-8859-b4f7e60f99a0" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.154060 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="45ddb73c-7ce5-4674-8859-b4f7e60f99a0" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.154269 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="45ddb73c-7ce5-4674-8859-b4f7e60f99a0" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.154298 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee376276-2fe0-47ce-9002-eae63b9efea2" containerName="nova-manage" Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.155862 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.158129 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-scheduler-config-data" Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.163539 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.236692 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghxxl\" (UniqueName: \"kubernetes.io/projected/1e26ab32-cb9b-45fc-856c-f58e6742bb74-kube-api-access-ghxxl\") pod \"nova-kuttl-scheduler-0\" (UID: \"1e26ab32-cb9b-45fc-856c-f58e6742bb74\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.236786 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e26ab32-cb9b-45fc-856c-f58e6742bb74-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"1e26ab32-cb9b-45fc-856c-f58e6742bb74\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.337731 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghxxl\" (UniqueName: \"kubernetes.io/projected/1e26ab32-cb9b-45fc-856c-f58e6742bb74-kube-api-access-ghxxl\") pod \"nova-kuttl-scheduler-0\" (UID: \"1e26ab32-cb9b-45fc-856c-f58e6742bb74\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.337849 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e26ab32-cb9b-45fc-856c-f58e6742bb74-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"1e26ab32-cb9b-45fc-856c-f58e6742bb74\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.344458 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e26ab32-cb9b-45fc-856c-f58e6742bb74-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"1e26ab32-cb9b-45fc-856c-f58e6742bb74\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.354416 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghxxl\" (UniqueName: \"kubernetes.io/projected/1e26ab32-cb9b-45fc-856c-f58e6742bb74-kube-api-access-ghxxl\") pod \"nova-kuttl-scheduler-0\" (UID: \"1e26ab32-cb9b-45fc-856c-f58e6742bb74\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.476677 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:49:00 crc kubenswrapper[4711]: I0123 08:49:00.902379 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:49:01 crc kubenswrapper[4711]: I0123 08:49:01.106159 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"1e26ab32-cb9b-45fc-856c-f58e6742bb74","Type":"ContainerStarted","Data":"2f77028668c79eb7edaba655236b213ccb507a10e33d6615b337f124769ca974"} Jan 23 08:49:01 crc kubenswrapper[4711]: I0123 08:49:01.106976 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"1e26ab32-cb9b-45fc-856c-f58e6742bb74","Type":"ContainerStarted","Data":"2b81a247267a5b3fc6dc23bdd302462ce39c3636722229e33a10c7dd3281a222"} Jan 23 08:49:01 crc kubenswrapper[4711]: I0123 08:49:01.148318 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podStartSLOduration=1.14829998 podStartE2EDuration="1.14829998s" podCreationTimestamp="2026-01-23 08:49:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:49:01.144844205 +0000 UTC m=+1726.717800583" watchObservedRunningTime="2026-01-23 08:49:01.14829998 +0000 UTC m=+1726.721256348" Jan 23 08:49:01 crc kubenswrapper[4711]: I0123 08:49:01.483042 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45ddb73c-7ce5-4674-8859-b4f7e60f99a0" path="/var/lib/kubelet/pods/45ddb73c-7ce5-4674-8859-b4f7e60f99a0/volumes" Jan 23 08:49:03 crc kubenswrapper[4711]: I0123 08:49:03.728706 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:49:03 crc kubenswrapper[4711]: I0123 08:49:03.800069 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa9d37bb-720a-4bd5-b741-a3a27402d4e3-config-data\") pod \"fa9d37bb-720a-4bd5-b741-a3a27402d4e3\" (UID: \"fa9d37bb-720a-4bd5-b741-a3a27402d4e3\") " Jan 23 08:49:03 crc kubenswrapper[4711]: I0123 08:49:03.800163 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa9d37bb-720a-4bd5-b741-a3a27402d4e3-logs\") pod \"fa9d37bb-720a-4bd5-b741-a3a27402d4e3\" (UID: \"fa9d37bb-720a-4bd5-b741-a3a27402d4e3\") " Jan 23 08:49:03 crc kubenswrapper[4711]: I0123 08:49:03.800231 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fth4k\" (UniqueName: \"kubernetes.io/projected/fa9d37bb-720a-4bd5-b741-a3a27402d4e3-kube-api-access-fth4k\") pod \"fa9d37bb-720a-4bd5-b741-a3a27402d4e3\" (UID: \"fa9d37bb-720a-4bd5-b741-a3a27402d4e3\") " Jan 23 08:49:03 crc kubenswrapper[4711]: I0123 08:49:03.800681 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa9d37bb-720a-4bd5-b741-a3a27402d4e3-logs" (OuterVolumeSpecName: "logs") pod "fa9d37bb-720a-4bd5-b741-a3a27402d4e3" (UID: "fa9d37bb-720a-4bd5-b741-a3a27402d4e3"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:49:03 crc kubenswrapper[4711]: I0123 08:49:03.821892 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa9d37bb-720a-4bd5-b741-a3a27402d4e3-config-data" (OuterVolumeSpecName: "config-data") pod "fa9d37bb-720a-4bd5-b741-a3a27402d4e3" (UID: "fa9d37bb-720a-4bd5-b741-a3a27402d4e3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:49:03 crc kubenswrapper[4711]: I0123 08:49:03.822096 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa9d37bb-720a-4bd5-b741-a3a27402d4e3-kube-api-access-fth4k" (OuterVolumeSpecName: "kube-api-access-fth4k") pod "fa9d37bb-720a-4bd5-b741-a3a27402d4e3" (UID: "fa9d37bb-720a-4bd5-b741-a3a27402d4e3"). InnerVolumeSpecName "kube-api-access-fth4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:49:03 crc kubenswrapper[4711]: I0123 08:49:03.902453 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fth4k\" (UniqueName: \"kubernetes.io/projected/fa9d37bb-720a-4bd5-b741-a3a27402d4e3-kube-api-access-fth4k\") on node \"crc\" DevicePath \"\"" Jan 23 08:49:03 crc kubenswrapper[4711]: I0123 08:49:03.902486 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa9d37bb-720a-4bd5-b741-a3a27402d4e3-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:49:03 crc kubenswrapper[4711]: I0123 08:49:03.902500 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa9d37bb-720a-4bd5-b741-a3a27402d4e3-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.131165 4711 generic.go:334] "Generic (PLEG): container finished" podID="e8596dc4-07c1-4665-9e11-307a245c7555" containerID="e914e648d155cfa7de640deef5e95ee9a47281abdab316fa2e5c1c54adc4f9d5" exitCode=0 Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.131258 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"e8596dc4-07c1-4665-9e11-307a245c7555","Type":"ContainerDied","Data":"e914e648d155cfa7de640deef5e95ee9a47281abdab316fa2e5c1c54adc4f9d5"} Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.133358 4711 generic.go:334] "Generic (PLEG): container finished" podID="fa9d37bb-720a-4bd5-b741-a3a27402d4e3" containerID="46576c1007aa3bd33140e11e182c6dbcab2913ac7e8616fa1001616ac892a64f" exitCode=0 Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.133392 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"fa9d37bb-720a-4bd5-b741-a3a27402d4e3","Type":"ContainerDied","Data":"46576c1007aa3bd33140e11e182c6dbcab2913ac7e8616fa1001616ac892a64f"} Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.133409 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.133421 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"fa9d37bb-720a-4bd5-b741-a3a27402d4e3","Type":"ContainerDied","Data":"780e4c4acd7b9ebdfe09b21d65cd610d9c3a33427eebbc832b86265abfb99282"} Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.133473 4711 scope.go:117] "RemoveContainer" containerID="46576c1007aa3bd33140e11e182c6dbcab2913ac7e8616fa1001616ac892a64f" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.164769 4711 scope.go:117] "RemoveContainer" containerID="1d10c01ced90ab15bb0de662deb017d87f0fa9040a866ee0c0a43bdf18c3fb36" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.171065 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.180392 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.200173 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:49:04 crc kubenswrapper[4711]: E0123 08:49:04.200609 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa9d37bb-720a-4bd5-b741-a3a27402d4e3" containerName="nova-kuttl-metadata-log" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.200628 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa9d37bb-720a-4bd5-b741-a3a27402d4e3" containerName="nova-kuttl-metadata-log" Jan 23 08:49:04 crc kubenswrapper[4711]: E0123 08:49:04.200648 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa9d37bb-720a-4bd5-b741-a3a27402d4e3" containerName="nova-kuttl-metadata-metadata" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.200656 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa9d37bb-720a-4bd5-b741-a3a27402d4e3" containerName="nova-kuttl-metadata-metadata" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.200858 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa9d37bb-720a-4bd5-b741-a3a27402d4e3" containerName="nova-kuttl-metadata-metadata" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.200878 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa9d37bb-720a-4bd5-b741-a3a27402d4e3" containerName="nova-kuttl-metadata-log" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.201999 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.203938 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-metadata-config-data" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.207676 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.254624 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.260755 4711 scope.go:117] "RemoveContainer" containerID="46576c1007aa3bd33140e11e182c6dbcab2913ac7e8616fa1001616ac892a64f" Jan 23 08:49:04 crc kubenswrapper[4711]: E0123 08:49:04.261209 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46576c1007aa3bd33140e11e182c6dbcab2913ac7e8616fa1001616ac892a64f\": container with ID starting with 46576c1007aa3bd33140e11e182c6dbcab2913ac7e8616fa1001616ac892a64f not found: ID does not exist" containerID="46576c1007aa3bd33140e11e182c6dbcab2913ac7e8616fa1001616ac892a64f" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.261250 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46576c1007aa3bd33140e11e182c6dbcab2913ac7e8616fa1001616ac892a64f"} err="failed to get container status \"46576c1007aa3bd33140e11e182c6dbcab2913ac7e8616fa1001616ac892a64f\": rpc error: code = NotFound desc = could not find container \"46576c1007aa3bd33140e11e182c6dbcab2913ac7e8616fa1001616ac892a64f\": container with ID starting with 46576c1007aa3bd33140e11e182c6dbcab2913ac7e8616fa1001616ac892a64f not found: ID does not exist" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.261281 4711 scope.go:117] "RemoveContainer" containerID="1d10c01ced90ab15bb0de662deb017d87f0fa9040a866ee0c0a43bdf18c3fb36" Jan 23 08:49:04 crc kubenswrapper[4711]: E0123 08:49:04.261568 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d10c01ced90ab15bb0de662deb017d87f0fa9040a866ee0c0a43bdf18c3fb36\": container with ID starting with 1d10c01ced90ab15bb0de662deb017d87f0fa9040a866ee0c0a43bdf18c3fb36 not found: ID does not exist" containerID="1d10c01ced90ab15bb0de662deb017d87f0fa9040a866ee0c0a43bdf18c3fb36" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.261602 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d10c01ced90ab15bb0de662deb017d87f0fa9040a866ee0c0a43bdf18c3fb36"} err="failed to get container status \"1d10c01ced90ab15bb0de662deb017d87f0fa9040a866ee0c0a43bdf18c3fb36\": rpc error: code = NotFound desc = could not find container \"1d10c01ced90ab15bb0de662deb017d87f0fa9040a866ee0c0a43bdf18c3fb36\": container with ID starting with 1d10c01ced90ab15bb0de662deb017d87f0fa9040a866ee0c0a43bdf18c3fb36 not found: ID does not exist" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.309292 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8596dc4-07c1-4665-9e11-307a245c7555-config-data\") pod \"e8596dc4-07c1-4665-9e11-307a245c7555\" (UID: \"e8596dc4-07c1-4665-9e11-307a245c7555\") " Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.309443 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pv257\" (UniqueName: \"kubernetes.io/projected/e8596dc4-07c1-4665-9e11-307a245c7555-kube-api-access-pv257\") pod \"e8596dc4-07c1-4665-9e11-307a245c7555\" (UID: \"e8596dc4-07c1-4665-9e11-307a245c7555\") " Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.309560 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8596dc4-07c1-4665-9e11-307a245c7555-logs\") pod \"e8596dc4-07c1-4665-9e11-307a245c7555\" (UID: 
\"e8596dc4-07c1-4665-9e11-307a245c7555\") " Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.309750 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74b5783d-fd94-4ad1-b7d6-23a1c223b37d-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"74b5783d-fd94-4ad1-b7d6-23a1c223b37d\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.309798 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kcqh\" (UniqueName: \"kubernetes.io/projected/74b5783d-fd94-4ad1-b7d6-23a1c223b37d-kube-api-access-6kcqh\") pod \"nova-kuttl-metadata-0\" (UID: \"74b5783d-fd94-4ad1-b7d6-23a1c223b37d\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.309819 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74b5783d-fd94-4ad1-b7d6-23a1c223b37d-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"74b5783d-fd94-4ad1-b7d6-23a1c223b37d\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.310136 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8596dc4-07c1-4665-9e11-307a245c7555-logs" (OuterVolumeSpecName: "logs") pod "e8596dc4-07c1-4665-9e11-307a245c7555" (UID: "e8596dc4-07c1-4665-9e11-307a245c7555"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.314564 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8596dc4-07c1-4665-9e11-307a245c7555-kube-api-access-pv257" (OuterVolumeSpecName: "kube-api-access-pv257") pod "e8596dc4-07c1-4665-9e11-307a245c7555" (UID: "e8596dc4-07c1-4665-9e11-307a245c7555"). InnerVolumeSpecName "kube-api-access-pv257". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.329642 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8596dc4-07c1-4665-9e11-307a245c7555-config-data" (OuterVolumeSpecName: "config-data") pod "e8596dc4-07c1-4665-9e11-307a245c7555" (UID: "e8596dc4-07c1-4665-9e11-307a245c7555"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.411615 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74b5783d-fd94-4ad1-b7d6-23a1c223b37d-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"74b5783d-fd94-4ad1-b7d6-23a1c223b37d\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.411669 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kcqh\" (UniqueName: \"kubernetes.io/projected/74b5783d-fd94-4ad1-b7d6-23a1c223b37d-kube-api-access-6kcqh\") pod \"nova-kuttl-metadata-0\" (UID: \"74b5783d-fd94-4ad1-b7d6-23a1c223b37d\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.411714 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74b5783d-fd94-4ad1-b7d6-23a1c223b37d-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"74b5783d-fd94-4ad1-b7d6-23a1c223b37d\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.411806 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8596dc4-07c1-4665-9e11-307a245c7555-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.411819 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pv257\" (UniqueName: \"kubernetes.io/projected/e8596dc4-07c1-4665-9e11-307a245c7555-kube-api-access-pv257\") on node \"crc\" DevicePath \"\"" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.411830 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8596dc4-07c1-4665-9e11-307a245c7555-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.412055 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74b5783d-fd94-4ad1-b7d6-23a1c223b37d-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"74b5783d-fd94-4ad1-b7d6-23a1c223b37d\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.415903 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74b5783d-fd94-4ad1-b7d6-23a1c223b37d-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"74b5783d-fd94-4ad1-b7d6-23a1c223b37d\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.427235 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kcqh\" (UniqueName: \"kubernetes.io/projected/74b5783d-fd94-4ad1-b7d6-23a1c223b37d-kube-api-access-6kcqh\") pod \"nova-kuttl-metadata-0\" (UID: \"74b5783d-fd94-4ad1-b7d6-23a1c223b37d\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.570787 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:49:04 crc kubenswrapper[4711]: I0123 08:49:04.978188 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:49:04 crc kubenswrapper[4711]: W0123 08:49:04.983393 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod74b5783d_fd94_4ad1_b7d6_23a1c223b37d.slice/crio-5222ae476c990190778f6045971e60139fa6a6e0973f802ac58a0cf38626e13d WatchSource:0}: Error finding container 5222ae476c990190778f6045971e60139fa6a6e0973f802ac58a0cf38626e13d: Status 404 returned error can't find the container with id 5222ae476c990190778f6045971e60139fa6a6e0973f802ac58a0cf38626e13d Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.146107 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"e8596dc4-07c1-4665-9e11-307a245c7555","Type":"ContainerDied","Data":"37866cf797c83f80e0342bc2b88b8070688d690f17dc96cfdac36c7ab0c1c2a7"} Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.146173 4711 scope.go:117] "RemoveContainer" containerID="e914e648d155cfa7de640deef5e95ee9a47281abdab316fa2e5c1c54adc4f9d5" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.146335 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.151184 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"74b5783d-fd94-4ad1-b7d6-23a1c223b37d","Type":"ContainerStarted","Data":"5222ae476c990190778f6045971e60139fa6a6e0973f802ac58a0cf38626e13d"} Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.166034 4711 scope.go:117] "RemoveContainer" containerID="26fde45806ee1a2fc7a5f70afc681ec09d033023bd9d21dc40299a527d06cee2" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.183099 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.196542 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.205234 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:49:05 crc kubenswrapper[4711]: E0123 08:49:05.205560 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8596dc4-07c1-4665-9e11-307a245c7555" containerName="nova-kuttl-api-log" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.205575 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8596dc4-07c1-4665-9e11-307a245c7555" containerName="nova-kuttl-api-log" Jan 23 08:49:05 crc kubenswrapper[4711]: E0123 08:49:05.205593 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8596dc4-07c1-4665-9e11-307a245c7555" containerName="nova-kuttl-api-api" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.205599 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8596dc4-07c1-4665-9e11-307a245c7555" containerName="nova-kuttl-api-api" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.205749 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8596dc4-07c1-4665-9e11-307a245c7555" containerName="nova-kuttl-api-log" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.205769 4711 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="e8596dc4-07c1-4665-9e11-307a245c7555" containerName="nova-kuttl-api-api" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.208293 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.211599 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-api-config-data" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.218228 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.325691 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcab875b-6101-462d-a763-1aa2441eecd6-config-data\") pod \"nova-kuttl-api-0\" (UID: \"bcab875b-6101-462d-a763-1aa2441eecd6\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.326008 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zn98\" (UniqueName: \"kubernetes.io/projected/bcab875b-6101-462d-a763-1aa2441eecd6-kube-api-access-8zn98\") pod \"nova-kuttl-api-0\" (UID: \"bcab875b-6101-462d-a763-1aa2441eecd6\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.326091 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bcab875b-6101-462d-a763-1aa2441eecd6-logs\") pod \"nova-kuttl-api-0\" (UID: \"bcab875b-6101-462d-a763-1aa2441eecd6\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.427206 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zn98\" (UniqueName: \"kubernetes.io/projected/bcab875b-6101-462d-a763-1aa2441eecd6-kube-api-access-8zn98\") pod \"nova-kuttl-api-0\" (UID: \"bcab875b-6101-462d-a763-1aa2441eecd6\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.427303 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bcab875b-6101-462d-a763-1aa2441eecd6-logs\") pod \"nova-kuttl-api-0\" (UID: \"bcab875b-6101-462d-a763-1aa2441eecd6\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.427431 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcab875b-6101-462d-a763-1aa2441eecd6-config-data\") pod \"nova-kuttl-api-0\" (UID: \"bcab875b-6101-462d-a763-1aa2441eecd6\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.427961 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bcab875b-6101-462d-a763-1aa2441eecd6-logs\") pod \"nova-kuttl-api-0\" (UID: \"bcab875b-6101-462d-a763-1aa2441eecd6\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.431915 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcab875b-6101-462d-a763-1aa2441eecd6-config-data\") pod \"nova-kuttl-api-0\" (UID: \"bcab875b-6101-462d-a763-1aa2441eecd6\") " 
pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.443346 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zn98\" (UniqueName: \"kubernetes.io/projected/bcab875b-6101-462d-a763-1aa2441eecd6-kube-api-access-8zn98\") pod \"nova-kuttl-api-0\" (UID: \"bcab875b-6101-462d-a763-1aa2441eecd6\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.483878 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8596dc4-07c1-4665-9e11-307a245c7555" path="/var/lib/kubelet/pods/e8596dc4-07c1-4665-9e11-307a245c7555/volumes" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.484763 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa9d37bb-720a-4bd5-b741-a3a27402d4e3" path="/var/lib/kubelet/pods/fa9d37bb-720a-4bd5-b741-a3a27402d4e3/volumes" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.485361 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:49:05 crc kubenswrapper[4711]: I0123 08:49:05.584401 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:49:06 crc kubenswrapper[4711]: I0123 08:49:06.008196 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:49:06 crc kubenswrapper[4711]: I0123 08:49:06.162781 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"bcab875b-6101-462d-a763-1aa2441eecd6","Type":"ContainerStarted","Data":"c90d3cbf54b43f3a74ece0460d33a0bcd97b9263e34709ef3fa896b44fbabdb6"} Jan 23 08:49:06 crc kubenswrapper[4711]: I0123 08:49:06.169655 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"74b5783d-fd94-4ad1-b7d6-23a1c223b37d","Type":"ContainerStarted","Data":"b5fbe5564ccf547d4add01ced66a73b11f6c83486f8866a46a68cd990b10f895"} Jan 23 08:49:06 crc kubenswrapper[4711]: I0123 08:49:06.169979 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"74b5783d-fd94-4ad1-b7d6-23a1c223b37d","Type":"ContainerStarted","Data":"be6268be31b40071e19e801a106475caadd229d1e2192d7b53fed596688a3ff5"} Jan 23 08:49:06 crc kubenswrapper[4711]: I0123 08:49:06.197855 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-metadata-0" podStartSLOduration=2.197833007 podStartE2EDuration="2.197833007s" podCreationTimestamp="2026-01-23 08:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:49:06.189709948 +0000 UTC m=+1731.762666326" watchObservedRunningTime="2026-01-23 08:49:06.197833007 +0000 UTC m=+1731.770789375" Jan 23 08:49:07 crc kubenswrapper[4711]: I0123 08:49:07.188086 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"bcab875b-6101-462d-a763-1aa2441eecd6","Type":"ContainerStarted","Data":"1b91e267f2dc3b6a04c3f66b4de9dfb23661fc3fbcc6b67c41f4c08fbd53e8a0"} Jan 23 08:49:07 crc kubenswrapper[4711]: I0123 08:49:07.188438 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" 
event={"ID":"bcab875b-6101-462d-a763-1aa2441eecd6","Type":"ContainerStarted","Data":"37a90164d812a813de4abbb7b8c39b62747cc52ca514de6e49b5317c37b58073"} Jan 23 08:49:07 crc kubenswrapper[4711]: I0123 08:49:07.225936 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-api-0" podStartSLOduration=2.22591516 podStartE2EDuration="2.22591516s" podCreationTimestamp="2026-01-23 08:49:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:49:07.221218265 +0000 UTC m=+1732.794174633" watchObservedRunningTime="2026-01-23 08:49:07.22591516 +0000 UTC m=+1732.798871528" Jan 23 08:49:09 crc kubenswrapper[4711]: I0123 08:49:09.571677 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:49:09 crc kubenswrapper[4711]: I0123 08:49:09.571727 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:49:10 crc kubenswrapper[4711]: I0123 08:49:10.474176 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:49:10 crc kubenswrapper[4711]: E0123 08:49:10.475013 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:49:10 crc kubenswrapper[4711]: I0123 08:49:10.476854 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:49:10 crc kubenswrapper[4711]: I0123 08:49:10.502668 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:49:11 crc kubenswrapper[4711]: I0123 08:49:11.239256 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:49:14 crc kubenswrapper[4711]: I0123 08:49:14.571325 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:49:14 crc kubenswrapper[4711]: I0123 08:49:14.571999 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:49:15 crc kubenswrapper[4711]: I0123 08:49:15.585377 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:49:15 crc kubenswrapper[4711]: I0123 08:49:15.585778 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:49:15 crc kubenswrapper[4711]: I0123 08:49:15.655796 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="74b5783d-fd94-4ad1-b7d6-23a1c223b37d" containerName="nova-kuttl-metadata-log" probeResult="failure" output="Get \"http://10.217.0.170:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:49:15 crc kubenswrapper[4711]: I0123 08:49:15.656294 4711 prober.go:107] "Probe failed" probeType="Startup" 
pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="74b5783d-fd94-4ad1-b7d6-23a1c223b37d" containerName="nova-kuttl-metadata-metadata" probeResult="failure" output="Get \"http://10.217.0.170:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:49:16 crc kubenswrapper[4711]: I0123 08:49:16.667725 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="bcab875b-6101-462d-a763-1aa2441eecd6" containerName="nova-kuttl-api-log" probeResult="failure" output="Get \"http://10.217.0.171:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:49:16 crc kubenswrapper[4711]: I0123 08:49:16.667744 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="bcab875b-6101-462d-a763-1aa2441eecd6" containerName="nova-kuttl-api-api" probeResult="failure" output="Get \"http://10.217.0.171:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:49:23 crc kubenswrapper[4711]: I0123 08:49:23.475196 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:49:23 crc kubenswrapper[4711]: E0123 08:49:23.476135 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:49:24 crc kubenswrapper[4711]: I0123 08:49:24.573615 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:49:24 crc kubenswrapper[4711]: I0123 08:49:24.575183 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:49:24 crc kubenswrapper[4711]: I0123 08:49:24.576268 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:49:25 crc kubenswrapper[4711]: I0123 08:49:25.390886 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:49:25 crc kubenswrapper[4711]: I0123 08:49:25.588446 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:49:25 crc kubenswrapper[4711]: I0123 08:49:25.588496 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:49:25 crc kubenswrapper[4711]: I0123 08:49:25.589237 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:49:25 crc kubenswrapper[4711]: I0123 08:49:25.589264 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:49:25 crc kubenswrapper[4711]: I0123 08:49:25.591019 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:49:25 crc kubenswrapper[4711]: I0123 08:49:25.591066 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:49:28 crc 
kubenswrapper[4711]: I0123 08:49:28.270431 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-api-1"] Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.273068 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-1" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.281897 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-api-2"] Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.283614 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-2" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.295071 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-1"] Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.305985 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-2"] Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.416862 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fbd9125-a9d2-4477-a6a9-57a49ac330fb-config-data\") pod \"nova-kuttl-api-2\" (UID: \"1fbd9125-a9d2-4477-a6a9-57a49ac330fb\") " pod="nova-kuttl-default/nova-kuttl-api-2" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.416926 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1fbd9125-a9d2-4477-a6a9-57a49ac330fb-logs\") pod \"nova-kuttl-api-2\" (UID: \"1fbd9125-a9d2-4477-a6a9-57a49ac330fb\") " pod="nova-kuttl-default/nova-kuttl-api-2" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.416993 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgcc2\" (UniqueName: \"kubernetes.io/projected/1fbd9125-a9d2-4477-a6a9-57a49ac330fb-kube-api-access-wgcc2\") pod \"nova-kuttl-api-2\" (UID: \"1fbd9125-a9d2-4477-a6a9-57a49ac330fb\") " pod="nova-kuttl-default/nova-kuttl-api-2" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.417090 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5cc9\" (UniqueName: \"kubernetes.io/projected/19be5184-7f14-41b5-88a9-bd6f83eecde5-kube-api-access-m5cc9\") pod \"nova-kuttl-api-1\" (UID: \"19be5184-7f14-41b5-88a9-bd6f83eecde5\") " pod="nova-kuttl-default/nova-kuttl-api-1" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.417252 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19be5184-7f14-41b5-88a9-bd6f83eecde5-config-data\") pod \"nova-kuttl-api-1\" (UID: \"19be5184-7f14-41b5-88a9-bd6f83eecde5\") " pod="nova-kuttl-default/nova-kuttl-api-1" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.417538 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/19be5184-7f14-41b5-88a9-bd6f83eecde5-logs\") pod \"nova-kuttl-api-1\" (UID: \"19be5184-7f14-41b5-88a9-bd6f83eecde5\") " pod="nova-kuttl-default/nova-kuttl-api-1" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.519083 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/19be5184-7f14-41b5-88a9-bd6f83eecde5-config-data\") pod \"nova-kuttl-api-1\" (UID: \"19be5184-7f14-41b5-88a9-bd6f83eecde5\") " pod="nova-kuttl-default/nova-kuttl-api-1" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.519177 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/19be5184-7f14-41b5-88a9-bd6f83eecde5-logs\") pod \"nova-kuttl-api-1\" (UID: \"19be5184-7f14-41b5-88a9-bd6f83eecde5\") " pod="nova-kuttl-default/nova-kuttl-api-1" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.519214 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fbd9125-a9d2-4477-a6a9-57a49ac330fb-config-data\") pod \"nova-kuttl-api-2\" (UID: \"1fbd9125-a9d2-4477-a6a9-57a49ac330fb\") " pod="nova-kuttl-default/nova-kuttl-api-2" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.519247 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1fbd9125-a9d2-4477-a6a9-57a49ac330fb-logs\") pod \"nova-kuttl-api-2\" (UID: \"1fbd9125-a9d2-4477-a6a9-57a49ac330fb\") " pod="nova-kuttl-default/nova-kuttl-api-2" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.519303 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgcc2\" (UniqueName: \"kubernetes.io/projected/1fbd9125-a9d2-4477-a6a9-57a49ac330fb-kube-api-access-wgcc2\") pod \"nova-kuttl-api-2\" (UID: \"1fbd9125-a9d2-4477-a6a9-57a49ac330fb\") " pod="nova-kuttl-default/nova-kuttl-api-2" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.519342 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5cc9\" (UniqueName: \"kubernetes.io/projected/19be5184-7f14-41b5-88a9-bd6f83eecde5-kube-api-access-m5cc9\") pod \"nova-kuttl-api-1\" (UID: \"19be5184-7f14-41b5-88a9-bd6f83eecde5\") " pod="nova-kuttl-default/nova-kuttl-api-1" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.519851 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1fbd9125-a9d2-4477-a6a9-57a49ac330fb-logs\") pod \"nova-kuttl-api-2\" (UID: \"1fbd9125-a9d2-4477-a6a9-57a49ac330fb\") " pod="nova-kuttl-default/nova-kuttl-api-2" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.519927 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/19be5184-7f14-41b5-88a9-bd6f83eecde5-logs\") pod \"nova-kuttl-api-1\" (UID: \"19be5184-7f14-41b5-88a9-bd6f83eecde5\") " pod="nova-kuttl-default/nova-kuttl-api-1" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.525812 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fbd9125-a9d2-4477-a6a9-57a49ac330fb-config-data\") pod \"nova-kuttl-api-2\" (UID: \"1fbd9125-a9d2-4477-a6a9-57a49ac330fb\") " pod="nova-kuttl-default/nova-kuttl-api-2" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.529856 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19be5184-7f14-41b5-88a9-bd6f83eecde5-config-data\") pod \"nova-kuttl-api-1\" (UID: \"19be5184-7f14-41b5-88a9-bd6f83eecde5\") " pod="nova-kuttl-default/nova-kuttl-api-1" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.543870 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wgcc2\" (UniqueName: \"kubernetes.io/projected/1fbd9125-a9d2-4477-a6a9-57a49ac330fb-kube-api-access-wgcc2\") pod \"nova-kuttl-api-2\" (UID: \"1fbd9125-a9d2-4477-a6a9-57a49ac330fb\") " pod="nova-kuttl-default/nova-kuttl-api-2" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.543964 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5cc9\" (UniqueName: \"kubernetes.io/projected/19be5184-7f14-41b5-88a9-bd6f83eecde5-kube-api-access-m5cc9\") pod \"nova-kuttl-api-1\" (UID: \"19be5184-7f14-41b5-88a9-bd6f83eecde5\") " pod="nova-kuttl-default/nova-kuttl-api-1" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.588523 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-1"] Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.590276 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-1" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.599855 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-2"] Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.600971 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-2" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.603632 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-1" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.606893 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-1"] Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.614702 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-2" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.628656 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-2"] Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.721618 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxpxc\" (UniqueName: \"kubernetes.io/projected/9287d315-4783-49d4-92a0-730d372a9a58-kube-api-access-rxpxc\") pod \"nova-kuttl-cell0-conductor-1\" (UID: \"9287d315-4783-49d4-92a0-730d372a9a58\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-1" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.721676 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9287d315-4783-49d4-92a0-730d372a9a58-config-data\") pod \"nova-kuttl-cell0-conductor-1\" (UID: \"9287d315-4783-49d4-92a0-730d372a9a58\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-1" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.721759 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnzkp\" (UniqueName: \"kubernetes.io/projected/718a4201-2f3d-4471-aecf-f3724cc7ce00-kube-api-access-nnzkp\") pod \"nova-kuttl-cell0-conductor-2\" (UID: \"718a4201-2f3d-4471-aecf-f3724cc7ce00\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-2" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.721906 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/718a4201-2f3d-4471-aecf-f3724cc7ce00-config-data\") pod \"nova-kuttl-cell0-conductor-2\" (UID: \"718a4201-2f3d-4471-aecf-f3724cc7ce00\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-2" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.823997 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9287d315-4783-49d4-92a0-730d372a9a58-config-data\") pod \"nova-kuttl-cell0-conductor-1\" (UID: \"9287d315-4783-49d4-92a0-730d372a9a58\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-1" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.824120 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnzkp\" (UniqueName: \"kubernetes.io/projected/718a4201-2f3d-4471-aecf-f3724cc7ce00-kube-api-access-nnzkp\") pod \"nova-kuttl-cell0-conductor-2\" (UID: \"718a4201-2f3d-4471-aecf-f3724cc7ce00\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-2" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.824209 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/718a4201-2f3d-4471-aecf-f3724cc7ce00-config-data\") pod \"nova-kuttl-cell0-conductor-2\" (UID: \"718a4201-2f3d-4471-aecf-f3724cc7ce00\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-2" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.824289 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxpxc\" (UniqueName: \"kubernetes.io/projected/9287d315-4783-49d4-92a0-730d372a9a58-kube-api-access-rxpxc\") pod \"nova-kuttl-cell0-conductor-1\" (UID: \"9287d315-4783-49d4-92a0-730d372a9a58\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-1" Jan 23 08:49:28 crc 
kubenswrapper[4711]: I0123 08:49:28.829809 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9287d315-4783-49d4-92a0-730d372a9a58-config-data\") pod \"nova-kuttl-cell0-conductor-1\" (UID: \"9287d315-4783-49d4-92a0-730d372a9a58\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-1" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.833021 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/718a4201-2f3d-4471-aecf-f3724cc7ce00-config-data\") pod \"nova-kuttl-cell0-conductor-2\" (UID: \"718a4201-2f3d-4471-aecf-f3724cc7ce00\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-2" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.844263 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxpxc\" (UniqueName: \"kubernetes.io/projected/9287d315-4783-49d4-92a0-730d372a9a58-kube-api-access-rxpxc\") pod \"nova-kuttl-cell0-conductor-1\" (UID: \"9287d315-4783-49d4-92a0-730d372a9a58\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-1" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.845391 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnzkp\" (UniqueName: \"kubernetes.io/projected/718a4201-2f3d-4471-aecf-f3724cc7ce00-kube-api-access-nnzkp\") pod \"nova-kuttl-cell0-conductor-2\" (UID: \"718a4201-2f3d-4471-aecf-f3724cc7ce00\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-2" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.918525 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-1" Jan 23 08:49:28 crc kubenswrapper[4711]: I0123 08:49:28.927330 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-2" Jan 23 08:49:29 crc kubenswrapper[4711]: I0123 08:49:29.078398 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-1"] Jan 23 08:49:29 crc kubenswrapper[4711]: I0123 08:49:29.160928 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-2"] Jan 23 08:49:29 crc kubenswrapper[4711]: I0123 08:49:29.385581 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-1"] Jan 23 08:49:29 crc kubenswrapper[4711]: I0123 08:49:29.423542 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-2" event={"ID":"1fbd9125-a9d2-4477-a6a9-57a49ac330fb","Type":"ContainerStarted","Data":"82171870c0cb8141a1c4cae8531bf8810631bc3139039275203e88621bb52722"} Jan 23 08:49:29 crc kubenswrapper[4711]: I0123 08:49:29.423586 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-2" event={"ID":"1fbd9125-a9d2-4477-a6a9-57a49ac330fb","Type":"ContainerStarted","Data":"d74528420aef2d9adcaa9d28e66e546154663e63690f1afdc25e324bf47cb8f3"} Jan 23 08:49:29 crc kubenswrapper[4711]: I0123 08:49:29.426378 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-1" event={"ID":"9287d315-4783-49d4-92a0-730d372a9a58","Type":"ContainerStarted","Data":"b4b25088a6ef454d8e01bca9debc944e579ed5199468617c72ba35364cf8dbc9"} Jan 23 08:49:29 crc kubenswrapper[4711]: I0123 08:49:29.428041 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-1" event={"ID":"19be5184-7f14-41b5-88a9-bd6f83eecde5","Type":"ContainerStarted","Data":"cdae0d1e88dde900cae3f58906d96005420b1aec4a36d829ee499d05cfb133d0"} Jan 23 08:49:29 crc kubenswrapper[4711]: I0123 08:49:29.428071 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-1" event={"ID":"19be5184-7f14-41b5-88a9-bd6f83eecde5","Type":"ContainerStarted","Data":"0d98c73761f2537fc8736c9972719b9c9a3c8bc1f284388a8359df7323052b0f"} Jan 23 08:49:29 crc kubenswrapper[4711]: I0123 08:49:29.428080 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-1" event={"ID":"19be5184-7f14-41b5-88a9-bd6f83eecde5","Type":"ContainerStarted","Data":"d30ba0089f7b373a2d9cdcfddbc6167f08d4c263566eecba55ec514929899956"} Jan 23 08:49:29 crc kubenswrapper[4711]: I0123 08:49:29.455295 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-api-1" podStartSLOduration=1.455272766 podStartE2EDuration="1.455272766s" podCreationTimestamp="2026-01-23 08:49:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:49:29.443496516 +0000 UTC m=+1755.016452884" watchObservedRunningTime="2026-01-23 08:49:29.455272766 +0000 UTC m=+1755.028229144" Jan 23 08:49:29 crc kubenswrapper[4711]: I0123 08:49:29.499366 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-2"] Jan 23 08:49:30 crc kubenswrapper[4711]: I0123 08:49:30.688930 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-2" event={"ID":"718a4201-2f3d-4471-aecf-f3724cc7ce00","Type":"ContainerStarted","Data":"3f08d4b9cce510d1d660f6d531f201cfdab19901a626ddec294eb69e703eb635"} Jan 23 08:49:30 crc 
kubenswrapper[4711]: I0123 08:49:30.689570 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-2" Jan 23 08:49:30 crc kubenswrapper[4711]: I0123 08:49:30.689663 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-2" event={"ID":"718a4201-2f3d-4471-aecf-f3724cc7ce00","Type":"ContainerStarted","Data":"f3783030d60c6c913552c6d71ddfdec0ea92bf3510cb11543e42cb01e42f1697"} Jan 23 08:49:30 crc kubenswrapper[4711]: I0123 08:49:30.698324 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-2" event={"ID":"1fbd9125-a9d2-4477-a6a9-57a49ac330fb","Type":"ContainerStarted","Data":"dd7823185394cf8222048f4f970b06c13ca0a73efa5a28ad573c6300f438883d"} Jan 23 08:49:30 crc kubenswrapper[4711]: I0123 08:49:30.702675 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-1" event={"ID":"9287d315-4783-49d4-92a0-730d372a9a58","Type":"ContainerStarted","Data":"c4caecb3a4094896b4afdab10ba38a00b1a5b757ee146e1bc76b917c287788dd"} Jan 23 08:49:30 crc kubenswrapper[4711]: I0123 08:49:30.722221 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-2" podStartSLOduration=2.722197362 podStartE2EDuration="2.722197362s" podCreationTimestamp="2026-01-23 08:49:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:49:30.7111125 +0000 UTC m=+1756.284068868" watchObservedRunningTime="2026-01-23 08:49:30.722197362 +0000 UTC m=+1756.295153730" Jan 23 08:49:30 crc kubenswrapper[4711]: I0123 08:49:30.735784 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-api-2" podStartSLOduration=2.735766783 podStartE2EDuration="2.735766783s" podCreationTimestamp="2026-01-23 08:49:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:49:30.729368897 +0000 UTC m=+1756.302325265" watchObservedRunningTime="2026-01-23 08:49:30.735766783 +0000 UTC m=+1756.308723161" Jan 23 08:49:30 crc kubenswrapper[4711]: I0123 08:49:30.759648 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-1" podStartSLOduration=2.759631008 podStartE2EDuration="2.759631008s" podCreationTimestamp="2026-01-23 08:49:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:49:30.754905813 +0000 UTC m=+1756.327862181" watchObservedRunningTime="2026-01-23 08:49:30.759631008 +0000 UTC m=+1756.332587376" Jan 23 08:49:31 crc kubenswrapper[4711]: I0123 08:49:31.710445 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-1" Jan 23 08:49:34 crc kubenswrapper[4711]: I0123 08:49:34.474190 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:49:34 crc kubenswrapper[4711]: E0123 08:49:34.475714 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:49:38 crc kubenswrapper[4711]: I0123 08:49:38.604431 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-1" Jan 23 08:49:38 crc kubenswrapper[4711]: I0123 08:49:38.605002 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-1" Jan 23 08:49:38 crc kubenswrapper[4711]: I0123 08:49:38.615694 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-2" Jan 23 08:49:38 crc kubenswrapper[4711]: I0123 08:49:38.615742 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-2" Jan 23 08:49:38 crc kubenswrapper[4711]: I0123 08:49:38.950234 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-1" Jan 23 08:49:38 crc kubenswrapper[4711]: I0123 08:49:38.961029 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-2" Jan 23 08:49:39 crc kubenswrapper[4711]: I0123 08:49:39.772667 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-1" podUID="19be5184-7f14-41b5-88a9-bd6f83eecde5" containerName="nova-kuttl-api-api" probeResult="failure" output="Get \"http://10.217.0.172:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:49:39 crc kubenswrapper[4711]: I0123 08:49:39.772961 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-2" podUID="1fbd9125-a9d2-4477-a6a9-57a49ac330fb" containerName="nova-kuttl-api-api" probeResult="failure" output="Get \"http://10.217.0.173:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:49:39 crc kubenswrapper[4711]: I0123 08:49:39.772995 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-1" podUID="19be5184-7f14-41b5-88a9-bd6f83eecde5" containerName="nova-kuttl-api-log" probeResult="failure" output="Get \"http://10.217.0.172:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:49:39 crc kubenswrapper[4711]: I0123 08:49:39.773020 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-2" podUID="1fbd9125-a9d2-4477-a6a9-57a49ac330fb" containerName="nova-kuttl-api-log" probeResult="failure" output="Get \"http://10.217.0.173:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.278696 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-2"] Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.280637 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-2" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.286688 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-1"] Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.287855 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-1" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.301330 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-1"] Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.318545 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70402d14-4b77-41a4-907d-3fda4e66b7cd-config-data\") pod \"nova-kuttl-scheduler-2\" (UID: \"70402d14-4b77-41a4-907d-3fda4e66b7cd\") " pod="nova-kuttl-default/nova-kuttl-scheduler-2" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.318603 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzfcn\" (UniqueName: \"kubernetes.io/projected/70402d14-4b77-41a4-907d-3fda4e66b7cd-kube-api-access-bzfcn\") pod \"nova-kuttl-scheduler-2\" (UID: \"70402d14-4b77-41a4-907d-3fda4e66b7cd\") " pod="nova-kuttl-default/nova-kuttl-scheduler-2" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.332593 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-2"] Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.393771 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-1"] Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.395149 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-1" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.412007 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-1"] Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.420002 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzfcn\" (UniqueName: \"kubernetes.io/projected/70402d14-4b77-41a4-907d-3fda4e66b7cd-kube-api-access-bzfcn\") pod \"nova-kuttl-scheduler-2\" (UID: \"70402d14-4b77-41a4-907d-3fda4e66b7cd\") " pod="nova-kuttl-default/nova-kuttl-scheduler-2" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.420134 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf-config-data\") pod \"nova-kuttl-scheduler-1\" (UID: \"a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf\") " pod="nova-kuttl-default/nova-kuttl-scheduler-1" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.420169 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgdf6\" (UniqueName: \"kubernetes.io/projected/a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf-kube-api-access-zgdf6\") pod \"nova-kuttl-scheduler-1\" (UID: \"a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf\") " pod="nova-kuttl-default/nova-kuttl-scheduler-1" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.420247 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70402d14-4b77-41a4-907d-3fda4e66b7cd-config-data\") pod \"nova-kuttl-scheduler-2\" (UID: \"70402d14-4b77-41a4-907d-3fda4e66b7cd\") " pod="nova-kuttl-default/nova-kuttl-scheduler-2" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.421545 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-2"] Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.424134 
4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-2" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.435453 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70402d14-4b77-41a4-907d-3fda4e66b7cd-config-data\") pod \"nova-kuttl-scheduler-2\" (UID: \"70402d14-4b77-41a4-907d-3fda4e66b7cd\") " pod="nova-kuttl-default/nova-kuttl-scheduler-2" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.524654 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fdc81782-4865-40e9-82c2-39a2e65fa1e2-logs\") pod \"nova-kuttl-metadata-2\" (UID: \"fdc81782-4865-40e9-82c2-39a2e65fa1e2\") " pod="nova-kuttl-default/nova-kuttl-metadata-2" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.524714 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mz4bg\" (UniqueName: \"kubernetes.io/projected/03f2241c-8262-4dfc-9425-8c48fc2ab7e3-kube-api-access-mz4bg\") pod \"nova-kuttl-metadata-1\" (UID: \"03f2241c-8262-4dfc-9425-8c48fc2ab7e3\") " pod="nova-kuttl-default/nova-kuttl-metadata-1" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.524774 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdc81782-4865-40e9-82c2-39a2e65fa1e2-config-data\") pod \"nova-kuttl-metadata-2\" (UID: \"fdc81782-4865-40e9-82c2-39a2e65fa1e2\") " pod="nova-kuttl-default/nova-kuttl-metadata-2" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.524806 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03f2241c-8262-4dfc-9425-8c48fc2ab7e3-logs\") pod \"nova-kuttl-metadata-1\" (UID: \"03f2241c-8262-4dfc-9425-8c48fc2ab7e3\") " pod="nova-kuttl-default/nova-kuttl-metadata-1" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.524904 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03f2241c-8262-4dfc-9425-8c48fc2ab7e3-config-data\") pod \"nova-kuttl-metadata-1\" (UID: \"03f2241c-8262-4dfc-9425-8c48fc2ab7e3\") " pod="nova-kuttl-default/nova-kuttl-metadata-1" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.524931 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxct6\" (UniqueName: \"kubernetes.io/projected/fdc81782-4865-40e9-82c2-39a2e65fa1e2-kube-api-access-jxct6\") pod \"nova-kuttl-metadata-2\" (UID: \"fdc81782-4865-40e9-82c2-39a2e65fa1e2\") " pod="nova-kuttl-default/nova-kuttl-metadata-2" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.524981 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf-config-data\") pod \"nova-kuttl-scheduler-1\" (UID: \"a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf\") " pod="nova-kuttl-default/nova-kuttl-scheduler-1" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.525012 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgdf6\" (UniqueName: \"kubernetes.io/projected/a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf-kube-api-access-zgdf6\") pod 
\"nova-kuttl-scheduler-1\" (UID: \"a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf\") " pod="nova-kuttl-default/nova-kuttl-scheduler-1" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.527074 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzfcn\" (UniqueName: \"kubernetes.io/projected/70402d14-4b77-41a4-907d-3fda4e66b7cd-kube-api-access-bzfcn\") pod \"nova-kuttl-scheduler-2\" (UID: \"70402d14-4b77-41a4-907d-3fda4e66b7cd\") " pod="nova-kuttl-default/nova-kuttl-scheduler-2" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.588733 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgdf6\" (UniqueName: \"kubernetes.io/projected/a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf-kube-api-access-zgdf6\") pod \"nova-kuttl-scheduler-1\" (UID: \"a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf\") " pod="nova-kuttl-default/nova-kuttl-scheduler-1" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.590574 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-2"] Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.600354 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf-config-data\") pod \"nova-kuttl-scheduler-1\" (UID: \"a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf\") " pod="nova-kuttl-default/nova-kuttl-scheduler-1" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.619379 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-2" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.627607 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fdc81782-4865-40e9-82c2-39a2e65fa1e2-logs\") pod \"nova-kuttl-metadata-2\" (UID: \"fdc81782-4865-40e9-82c2-39a2e65fa1e2\") " pod="nova-kuttl-default/nova-kuttl-metadata-2" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.627666 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mz4bg\" (UniqueName: \"kubernetes.io/projected/03f2241c-8262-4dfc-9425-8c48fc2ab7e3-kube-api-access-mz4bg\") pod \"nova-kuttl-metadata-1\" (UID: \"03f2241c-8262-4dfc-9425-8c48fc2ab7e3\") " pod="nova-kuttl-default/nova-kuttl-metadata-1" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.627714 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdc81782-4865-40e9-82c2-39a2e65fa1e2-config-data\") pod \"nova-kuttl-metadata-2\" (UID: \"fdc81782-4865-40e9-82c2-39a2e65fa1e2\") " pod="nova-kuttl-default/nova-kuttl-metadata-2" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.627748 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03f2241c-8262-4dfc-9425-8c48fc2ab7e3-logs\") pod \"nova-kuttl-metadata-1\" (UID: \"03f2241c-8262-4dfc-9425-8c48fc2ab7e3\") " pod="nova-kuttl-default/nova-kuttl-metadata-1" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.627813 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03f2241c-8262-4dfc-9425-8c48fc2ab7e3-config-data\") pod \"nova-kuttl-metadata-1\" (UID: \"03f2241c-8262-4dfc-9425-8c48fc2ab7e3\") " pod="nova-kuttl-default/nova-kuttl-metadata-1" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 
08:49:40.627841 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxct6\" (UniqueName: \"kubernetes.io/projected/fdc81782-4865-40e9-82c2-39a2e65fa1e2-kube-api-access-jxct6\") pod \"nova-kuttl-metadata-2\" (UID: \"fdc81782-4865-40e9-82c2-39a2e65fa1e2\") " pod="nova-kuttl-default/nova-kuttl-metadata-2" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.629331 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03f2241c-8262-4dfc-9425-8c48fc2ab7e3-logs\") pod \"nova-kuttl-metadata-1\" (UID: \"03f2241c-8262-4dfc-9425-8c48fc2ab7e3\") " pod="nova-kuttl-default/nova-kuttl-metadata-1" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.634417 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdc81782-4865-40e9-82c2-39a2e65fa1e2-config-data\") pod \"nova-kuttl-metadata-2\" (UID: \"fdc81782-4865-40e9-82c2-39a2e65fa1e2\") " pod="nova-kuttl-default/nova-kuttl-metadata-2" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.634768 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fdc81782-4865-40e9-82c2-39a2e65fa1e2-logs\") pod \"nova-kuttl-metadata-2\" (UID: \"fdc81782-4865-40e9-82c2-39a2e65fa1e2\") " pod="nova-kuttl-default/nova-kuttl-metadata-2" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.635583 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-1" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.649444 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03f2241c-8262-4dfc-9425-8c48fc2ab7e3-config-data\") pod \"nova-kuttl-metadata-1\" (UID: \"03f2241c-8262-4dfc-9425-8c48fc2ab7e3\") " pod="nova-kuttl-default/nova-kuttl-metadata-1" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.664124 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxct6\" (UniqueName: \"kubernetes.io/projected/fdc81782-4865-40e9-82c2-39a2e65fa1e2-kube-api-access-jxct6\") pod \"nova-kuttl-metadata-2\" (UID: \"fdc81782-4865-40e9-82c2-39a2e65fa1e2\") " pod="nova-kuttl-default/nova-kuttl-metadata-2" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.672310 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mz4bg\" (UniqueName: \"kubernetes.io/projected/03f2241c-8262-4dfc-9425-8c48fc2ab7e3-kube-api-access-mz4bg\") pod \"nova-kuttl-metadata-1\" (UID: \"03f2241c-8262-4dfc-9425-8c48fc2ab7e3\") " pod="nova-kuttl-default/nova-kuttl-metadata-1" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.712976 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-1" Jan 23 08:49:40 crc kubenswrapper[4711]: I0123 08:49:40.780024 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-2" Jan 23 08:49:41 crc kubenswrapper[4711]: I0123 08:49:41.315941 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-1"] Jan 23 08:49:41 crc kubenswrapper[4711]: I0123 08:49:41.358955 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-2"] Jan 23 08:49:41 crc kubenswrapper[4711]: I0123 08:49:41.447603 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-1"] Jan 23 08:49:41 crc kubenswrapper[4711]: W0123 08:49:41.539111 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfdc81782_4865_40e9_82c2_39a2e65fa1e2.slice/crio-eb9f650e6c3d0da4dd7c34e6731d2dc6b2eb42f3ef9759f6393d5cd21c169bd3 WatchSource:0}: Error finding container eb9f650e6c3d0da4dd7c34e6731d2dc6b2eb42f3ef9759f6393d5cd21c169bd3: Status 404 returned error can't find the container with id eb9f650e6c3d0da4dd7c34e6731d2dc6b2eb42f3ef9759f6393d5cd21c169bd3 Jan 23 08:49:41 crc kubenswrapper[4711]: I0123 08:49:41.540558 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-2"] Jan 23 08:49:41 crc kubenswrapper[4711]: I0123 08:49:41.829029 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-2" event={"ID":"fdc81782-4865-40e9-82c2-39a2e65fa1e2","Type":"ContainerStarted","Data":"5b1ec1492997fddf3f23df2f78ca7cc3e54eb22383f28666c60d02a638fab645"} Jan 23 08:49:41 crc kubenswrapper[4711]: I0123 08:49:41.829421 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-2" event={"ID":"fdc81782-4865-40e9-82c2-39a2e65fa1e2","Type":"ContainerStarted","Data":"eb9f650e6c3d0da4dd7c34e6731d2dc6b2eb42f3ef9759f6393d5cd21c169bd3"} Jan 23 08:49:41 crc kubenswrapper[4711]: I0123 08:49:41.838462 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-1" event={"ID":"03f2241c-8262-4dfc-9425-8c48fc2ab7e3","Type":"ContainerStarted","Data":"0aced10eb8db89b3d6952004e7468ad74e2e73f1e1a429494bddb96459a49d35"} Jan 23 08:49:41 crc kubenswrapper[4711]: I0123 08:49:41.838547 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-1" event={"ID":"03f2241c-8262-4dfc-9425-8c48fc2ab7e3","Type":"ContainerStarted","Data":"4edba40e45bdffb13f14b99422197614aa2c010e8398466a6fced331796f82f9"} Jan 23 08:49:41 crc kubenswrapper[4711]: I0123 08:49:41.840054 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-2" event={"ID":"70402d14-4b77-41a4-907d-3fda4e66b7cd","Type":"ContainerStarted","Data":"eb0f5e38d2c74a2f0a67fa0e4369421f4b33874dc85fa37d2309bc3fc6501023"} Jan 23 08:49:41 crc kubenswrapper[4711]: I0123 08:49:41.840100 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-2" event={"ID":"70402d14-4b77-41a4-907d-3fda4e66b7cd","Type":"ContainerStarted","Data":"a4a4fb741a20b5492d9d245e9fff97af9fbbbbd535c951b12ebe23c37f8e99fd"} Jan 23 08:49:41 crc kubenswrapper[4711]: I0123 08:49:41.849555 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-1" event={"ID":"a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf","Type":"ContainerStarted","Data":"51e40f1f2459117feb29255f35b9f5ff3936b16a1dfdcc5ba369c2094c388d45"} Jan 23 08:49:41 crc kubenswrapper[4711]: I0123 
08:49:41.849709 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-1" event={"ID":"a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf","Type":"ContainerStarted","Data":"5589010028ba384fd3cda7b3f3883b7cc564216d70e3522531a2431325ce2e25"} Jan 23 08:49:41 crc kubenswrapper[4711]: I0123 08:49:41.888857 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-scheduler-1" podStartSLOduration=1.888839667 podStartE2EDuration="1.888839667s" podCreationTimestamp="2026-01-23 08:49:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:49:41.887071024 +0000 UTC m=+1767.460027392" watchObservedRunningTime="2026-01-23 08:49:41.888839667 +0000 UTC m=+1767.461796035" Jan 23 08:49:41 crc kubenswrapper[4711]: I0123 08:49:41.900619 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-scheduler-2" podStartSLOduration=1.900600115 podStartE2EDuration="1.900600115s" podCreationTimestamp="2026-01-23 08:49:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:49:41.867211428 +0000 UTC m=+1767.440167806" watchObservedRunningTime="2026-01-23 08:49:41.900600115 +0000 UTC m=+1767.473556483" Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.003662 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-2"] Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.005105 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-2" Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.013521 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-1"] Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.014847 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-1" Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.030232 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-1"] Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.040613 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-2"] Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.053551 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13a3efe4-fb91-4f25-b266-84cef3bd94b1-config-data\") pod \"nova-kuttl-cell1-conductor-1\" (UID: \"13a3efe4-fb91-4f25-b266-84cef3bd94b1\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-1" Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.053593 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdjzw\" (UniqueName: \"kubernetes.io/projected/13a3efe4-fb91-4f25-b266-84cef3bd94b1-kube-api-access-jdjzw\") pod \"nova-kuttl-cell1-conductor-1\" (UID: \"13a3efe4-fb91-4f25-b266-84cef3bd94b1\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-1" Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.053667 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c752g\" (UniqueName: \"kubernetes.io/projected/e7695748-8afe-456b-bffe-7908726a6ca1-kube-api-access-c752g\") pod \"nova-kuttl-cell1-conductor-2\" (UID: \"e7695748-8afe-456b-bffe-7908726a6ca1\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-2" Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.053705 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7695748-8afe-456b-bffe-7908726a6ca1-config-data\") pod \"nova-kuttl-cell1-conductor-2\" (UID: \"e7695748-8afe-456b-bffe-7908726a6ca1\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-2" Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.155227 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13a3efe4-fb91-4f25-b266-84cef3bd94b1-config-data\") pod \"nova-kuttl-cell1-conductor-1\" (UID: \"13a3efe4-fb91-4f25-b266-84cef3bd94b1\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-1" Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.155274 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdjzw\" (UniqueName: \"kubernetes.io/projected/13a3efe4-fb91-4f25-b266-84cef3bd94b1-kube-api-access-jdjzw\") pod \"nova-kuttl-cell1-conductor-1\" (UID: \"13a3efe4-fb91-4f25-b266-84cef3bd94b1\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-1" Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.155342 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c752g\" (UniqueName: \"kubernetes.io/projected/e7695748-8afe-456b-bffe-7908726a6ca1-kube-api-access-c752g\") pod \"nova-kuttl-cell1-conductor-2\" (UID: \"e7695748-8afe-456b-bffe-7908726a6ca1\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-2" Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.155369 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/e7695748-8afe-456b-bffe-7908726a6ca1-config-data\") pod \"nova-kuttl-cell1-conductor-2\" (UID: \"e7695748-8afe-456b-bffe-7908726a6ca1\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-2" Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.160169 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13a3efe4-fb91-4f25-b266-84cef3bd94b1-config-data\") pod \"nova-kuttl-cell1-conductor-1\" (UID: \"13a3efe4-fb91-4f25-b266-84cef3bd94b1\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-1" Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.160679 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7695748-8afe-456b-bffe-7908726a6ca1-config-data\") pod \"nova-kuttl-cell1-conductor-2\" (UID: \"e7695748-8afe-456b-bffe-7908726a6ca1\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-2" Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.181885 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdjzw\" (UniqueName: \"kubernetes.io/projected/13a3efe4-fb91-4f25-b266-84cef3bd94b1-kube-api-access-jdjzw\") pod \"nova-kuttl-cell1-conductor-1\" (UID: \"13a3efe4-fb91-4f25-b266-84cef3bd94b1\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-1" Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.182483 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c752g\" (UniqueName: \"kubernetes.io/projected/e7695748-8afe-456b-bffe-7908726a6ca1-kube-api-access-c752g\") pod \"nova-kuttl-cell1-conductor-2\" (UID: \"e7695748-8afe-456b-bffe-7908726a6ca1\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-2" Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.374078 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-2" Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.383359 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-1" Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.867145 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-1" event={"ID":"03f2241c-8262-4dfc-9425-8c48fc2ab7e3","Type":"ContainerStarted","Data":"60df0aa3d262579d6c41f2ae3c9a66e1ab55fa78629a039b81218a364f17f041"} Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.872123 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-2" event={"ID":"fdc81782-4865-40e9-82c2-39a2e65fa1e2","Type":"ContainerStarted","Data":"591437a4cd0b764b1ab44e2ca412f7a52c1a6edc8ac8b80e92a1a7372b83023e"} Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.896833 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-metadata-1" podStartSLOduration=2.896817138 podStartE2EDuration="2.896817138s" podCreationTimestamp="2026-01-23 08:49:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:49:42.893656371 +0000 UTC m=+1768.466612739" watchObservedRunningTime="2026-01-23 08:49:42.896817138 +0000 UTC m=+1768.469773506" Jan 23 08:49:42 crc kubenswrapper[4711]: I0123 08:49:42.939049 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-metadata-2" podStartSLOduration=2.939027192 podStartE2EDuration="2.939027192s" podCreationTimestamp="2026-01-23 08:49:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:49:42.923781568 +0000 UTC m=+1768.496737946" watchObservedRunningTime="2026-01-23 08:49:42.939027192 +0000 UTC m=+1768.511983560" Jan 23 08:49:43 crc kubenswrapper[4711]: I0123 08:49:43.084217 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-2"] Jan 23 08:49:43 crc kubenswrapper[4711]: I0123 08:49:43.242792 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-1"] Jan 23 08:49:43 crc kubenswrapper[4711]: W0123 08:49:43.248543 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13a3efe4_fb91_4f25_b266_84cef3bd94b1.slice/crio-8d158b3933170abbd92370a3bed62f30ce9d3e1cd2941dce1b9024144bff9ad2 WatchSource:0}: Error finding container 8d158b3933170abbd92370a3bed62f30ce9d3e1cd2941dce1b9024144bff9ad2: Status 404 returned error can't find the container with id 8d158b3933170abbd92370a3bed62f30ce9d3e1cd2941dce1b9024144bff9ad2 Jan 23 08:49:43 crc kubenswrapper[4711]: I0123 08:49:43.879107 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-2" event={"ID":"e7695748-8afe-456b-bffe-7908726a6ca1","Type":"ContainerStarted","Data":"3eb9a849d328e95c270ac399d44ae87a5007dfa9b9ac175c4de1db3e1acb1341"} Jan 23 08:49:43 crc kubenswrapper[4711]: I0123 08:49:43.880929 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-1" event={"ID":"13a3efe4-fb91-4f25-b266-84cef3bd94b1","Type":"ContainerStarted","Data":"8d158b3933170abbd92370a3bed62f30ce9d3e1cd2941dce1b9024144bff9ad2"} Jan 23 08:49:44 crc kubenswrapper[4711]: I0123 08:49:44.890393 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="nova-kuttl-default/nova-kuttl-cell1-conductor-1" event={"ID":"13a3efe4-fb91-4f25-b266-84cef3bd94b1","Type":"ContainerStarted","Data":"06dc3dccdf9391925cc6b2e757a2102f7ac109f205eeeb4f3a00541e99be56e5"} Jan 23 08:49:44 crc kubenswrapper[4711]: I0123 08:49:44.890725 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-1" Jan 23 08:49:44 crc kubenswrapper[4711]: I0123 08:49:44.893178 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-2" event={"ID":"e7695748-8afe-456b-bffe-7908726a6ca1","Type":"ContainerStarted","Data":"b7e4fd18264161cdf0e0e820d2428400f3d4ca60bb722f703e2e01cdc50959ff"} Jan 23 08:49:44 crc kubenswrapper[4711]: I0123 08:49:44.893318 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-2" Jan 23 08:49:44 crc kubenswrapper[4711]: I0123 08:49:44.917792 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-1" podStartSLOduration=3.91777363 podStartE2EDuration="3.91777363s" podCreationTimestamp="2026-01-23 08:49:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:49:44.906725839 +0000 UTC m=+1770.479682207" watchObservedRunningTime="2026-01-23 08:49:44.91777363 +0000 UTC m=+1770.490729998" Jan 23 08:49:44 crc kubenswrapper[4711]: I0123 08:49:44.934574 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-2" podStartSLOduration=3.934550851 podStartE2EDuration="3.934550851s" podCreationTimestamp="2026-01-23 08:49:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:49:44.92757519 +0000 UTC m=+1770.500531558" watchObservedRunningTime="2026-01-23 08:49:44.934550851 +0000 UTC m=+1770.507507219" Jan 23 08:49:45 crc kubenswrapper[4711]: I0123 08:49:45.619803 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-scheduler-2" Jan 23 08:49:45 crc kubenswrapper[4711]: I0123 08:49:45.636588 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-scheduler-1" Jan 23 08:49:45 crc kubenswrapper[4711]: I0123 08:49:45.714653 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-1" Jan 23 08:49:45 crc kubenswrapper[4711]: I0123 08:49:45.714716 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-1" Jan 23 08:49:45 crc kubenswrapper[4711]: I0123 08:49:45.784598 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-2" Jan 23 08:49:45 crc kubenswrapper[4711]: I0123 08:49:45.785434 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-2" Jan 23 08:49:47 crc kubenswrapper[4711]: I0123 08:49:47.055306 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/keystone-db-create-hdxrj"] Jan 23 08:49:47 crc kubenswrapper[4711]: I0123 08:49:47.069988 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/placement-728b-account-create-update-nllmn"] Jan 23 08:49:47 crc kubenswrapper[4711]: I0123 08:49:47.080109 
4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/placement-728b-account-create-update-nllmn"] Jan 23 08:49:47 crc kubenswrapper[4711]: I0123 08:49:47.088222 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/keystone-8a69-account-create-update-w6dvl"] Jan 23 08:49:47 crc kubenswrapper[4711]: I0123 08:49:47.095391 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/keystone-db-create-hdxrj"] Jan 23 08:49:47 crc kubenswrapper[4711]: I0123 08:49:47.102642 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/keystone-8a69-account-create-update-w6dvl"] Jan 23 08:49:47 crc kubenswrapper[4711]: I0123 08:49:47.485819 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15d700a2-d850-4643-b834-4bcad727b44e" path="/var/lib/kubelet/pods/15d700a2-d850-4643-b834-4bcad727b44e/volumes" Jan 23 08:49:47 crc kubenswrapper[4711]: I0123 08:49:47.486685 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4af8008c-c038-45dd-9384-485e4ba2c730" path="/var/lib/kubelet/pods/4af8008c-c038-45dd-9384-485e4ba2c730/volumes" Jan 23 08:49:47 crc kubenswrapper[4711]: I0123 08:49:47.487407 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a72521f-2374-401f-99fe-a4c1c9a06aef" path="/var/lib/kubelet/pods/9a72521f-2374-401f-99fe-a4c1c9a06aef/volumes" Jan 23 08:49:48 crc kubenswrapper[4711]: I0123 08:49:48.024720 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/placement-db-create-pgw2h"] Jan 23 08:49:48 crc kubenswrapper[4711]: I0123 08:49:48.032346 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/placement-db-create-pgw2h"] Jan 23 08:49:48 crc kubenswrapper[4711]: I0123 08:49:48.609466 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-api-1" Jan 23 08:49:48 crc kubenswrapper[4711]: I0123 08:49:48.609609 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-api-1" Jan 23 08:49:48 crc kubenswrapper[4711]: I0123 08:49:48.610274 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-api-1" Jan 23 08:49:48 crc kubenswrapper[4711]: I0123 08:49:48.610348 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-api-1" Jan 23 08:49:48 crc kubenswrapper[4711]: I0123 08:49:48.613785 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-api-1" Jan 23 08:49:48 crc kubenswrapper[4711]: I0123 08:49:48.616989 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-api-1" Jan 23 08:49:48 crc kubenswrapper[4711]: I0123 08:49:48.619703 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-api-2" Jan 23 08:49:48 crc kubenswrapper[4711]: I0123 08:49:48.621160 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-api-2" Jan 23 08:49:48 crc kubenswrapper[4711]: I0123 08:49:48.621369 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-api-2" Jan 23 08:49:48 crc kubenswrapper[4711]: I0123 08:49:48.625063 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-api-2" Jan 23 08:49:48 crc 
kubenswrapper[4711]: I0123 08:49:48.929839 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-api-2" Jan 23 08:49:48 crc kubenswrapper[4711]: I0123 08:49:48.933239 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-api-2" Jan 23 08:49:49 crc kubenswrapper[4711]: I0123 08:49:49.473868 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:49:49 crc kubenswrapper[4711]: E0123 08:49:49.475500 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:49:49 crc kubenswrapper[4711]: I0123 08:49:49.484048 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f" path="/var/lib/kubelet/pods/51f2d8dc-db68-47a3-b7c7-45c6eaf8dd8f/volumes" Jan 23 08:49:50 crc kubenswrapper[4711]: I0123 08:49:50.620392 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-scheduler-2" Jan 23 08:49:50 crc kubenswrapper[4711]: I0123 08:49:50.636951 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-scheduler-1" Jan 23 08:49:50 crc kubenswrapper[4711]: I0123 08:49:50.650783 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-scheduler-2" Jan 23 08:49:50 crc kubenswrapper[4711]: I0123 08:49:50.665459 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-scheduler-1" Jan 23 08:49:50 crc kubenswrapper[4711]: I0123 08:49:50.714080 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-metadata-1" Jan 23 08:49:50 crc kubenswrapper[4711]: I0123 08:49:50.714119 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-metadata-1" Jan 23 08:49:50 crc kubenswrapper[4711]: I0123 08:49:50.781179 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-metadata-2" Jan 23 08:49:50 crc kubenswrapper[4711]: I0123 08:49:50.781263 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-metadata-2" Jan 23 08:49:50 crc kubenswrapper[4711]: I0123 08:49:50.993519 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-scheduler-2" Jan 23 08:49:50 crc kubenswrapper[4711]: I0123 08:49:50.993574 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-scheduler-1" Jan 23 08:49:51 crc kubenswrapper[4711]: I0123 08:49:51.797810 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-metadata-1" podUID="03f2241c-8262-4dfc-9425-8c48fc2ab7e3" containerName="nova-kuttl-metadata-metadata" probeResult="failure" output="Get \"http://10.217.0.178:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:49:51 crc kubenswrapper[4711]: 
I0123 08:49:51.797856 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-metadata-1" podUID="03f2241c-8262-4dfc-9425-8c48fc2ab7e3" containerName="nova-kuttl-metadata-log" probeResult="failure" output="Get \"http://10.217.0.178:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:49:51 crc kubenswrapper[4711]: I0123 08:49:51.838926 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-metadata-2" podUID="fdc81782-4865-40e9-82c2-39a2e65fa1e2" containerName="nova-kuttl-metadata-log" probeResult="failure" output="Get \"http://10.217.0.179:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:49:51 crc kubenswrapper[4711]: I0123 08:49:51.879794 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-metadata-2" podUID="fdc81782-4865-40e9-82c2-39a2e65fa1e2" containerName="nova-kuttl-metadata-metadata" probeResult="failure" output="Get \"http://10.217.0.179:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:49:52 crc kubenswrapper[4711]: I0123 08:49:52.406206 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-2" Jan 23 08:49:52 crc kubenswrapper[4711]: I0123 08:49:52.410797 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-1" Jan 23 08:50:00 crc kubenswrapper[4711]: I0123 08:50:00.718150 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-metadata-1" Jan 23 08:50:00 crc kubenswrapper[4711]: I0123 08:50:00.721223 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-metadata-1" Jan 23 08:50:00 crc kubenswrapper[4711]: I0123 08:50:00.722644 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-metadata-1" Jan 23 08:50:00 crc kubenswrapper[4711]: I0123 08:50:00.784554 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-metadata-2" Jan 23 08:50:00 crc kubenswrapper[4711]: I0123 08:50:00.784633 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-metadata-2" Jan 23 08:50:00 crc kubenswrapper[4711]: I0123 08:50:00.786680 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-metadata-2" Jan 23 08:50:00 crc kubenswrapper[4711]: I0123 08:50:00.786883 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-metadata-2" Jan 23 08:50:01 crc kubenswrapper[4711]: I0123 08:50:01.048941 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-metadata-1" Jan 23 08:50:01 crc kubenswrapper[4711]: I0123 08:50:01.474267 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:50:01 crc kubenswrapper[4711]: E0123 08:50:01.474696 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:50:01 crc kubenswrapper[4711]: I0123 08:50:01.759039 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-2"] Jan 23 08:50:01 crc kubenswrapper[4711]: I0123 08:50:01.759307 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-2" podUID="1fbd9125-a9d2-4477-a6a9-57a49ac330fb" containerName="nova-kuttl-api-log" containerID="cri-o://82171870c0cb8141a1c4cae8531bf8810631bc3139039275203e88621bb52722" gracePeriod=30 Jan 23 08:50:01 crc kubenswrapper[4711]: I0123 08:50:01.759873 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-2" podUID="1fbd9125-a9d2-4477-a6a9-57a49ac330fb" containerName="nova-kuttl-api-api" containerID="cri-o://dd7823185394cf8222048f4f970b06c13ca0a73efa5a28ad573c6300f438883d" gracePeriod=30 Jan 23 08:50:01 crc kubenswrapper[4711]: I0123 08:50:01.771894 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-1"] Jan 23 08:50:01 crc kubenswrapper[4711]: I0123 08:50:01.772231 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-1" podUID="19be5184-7f14-41b5-88a9-bd6f83eecde5" containerName="nova-kuttl-api-log" containerID="cri-o://0d98c73761f2537fc8736c9972719b9c9a3c8bc1f284388a8359df7323052b0f" gracePeriod=30 Jan 23 08:50:01 crc kubenswrapper[4711]: I0123 08:50:01.772321 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-1" podUID="19be5184-7f14-41b5-88a9-bd6f83eecde5" containerName="nova-kuttl-api-api" containerID="cri-o://cdae0d1e88dde900cae3f58906d96005420b1aec4a36d829ee499d05cfb133d0" gracePeriod=30 Jan 23 08:50:02 crc kubenswrapper[4711]: I0123 08:50:02.059888 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-2"] Jan 23 08:50:02 crc kubenswrapper[4711]: I0123 08:50:02.060102 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-2" podUID="718a4201-2f3d-4471-aecf-f3724cc7ce00" containerName="nova-kuttl-cell0-conductor-conductor" containerID="cri-o://3f08d4b9cce510d1d660f6d531f201cfdab19901a626ddec294eb69e703eb635" gracePeriod=30 Jan 23 08:50:02 crc kubenswrapper[4711]: I0123 08:50:02.061857 4711 generic.go:334] "Generic (PLEG): container finished" podID="1fbd9125-a9d2-4477-a6a9-57a49ac330fb" containerID="82171870c0cb8141a1c4cae8531bf8810631bc3139039275203e88621bb52722" exitCode=143 Jan 23 08:50:02 crc kubenswrapper[4711]: I0123 08:50:02.061935 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-2" event={"ID":"1fbd9125-a9d2-4477-a6a9-57a49ac330fb","Type":"ContainerDied","Data":"82171870c0cb8141a1c4cae8531bf8810631bc3139039275203e88621bb52722"} Jan 23 08:50:02 crc kubenswrapper[4711]: I0123 08:50:02.067081 4711 generic.go:334] "Generic (PLEG): container finished" podID="19be5184-7f14-41b5-88a9-bd6f83eecde5" containerID="0d98c73761f2537fc8736c9972719b9c9a3c8bc1f284388a8359df7323052b0f" exitCode=143 Jan 23 08:50:02 crc kubenswrapper[4711]: I0123 08:50:02.068179 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-1" 
event={"ID":"19be5184-7f14-41b5-88a9-bd6f83eecde5","Type":"ContainerDied","Data":"0d98c73761f2537fc8736c9972719b9c9a3c8bc1f284388a8359df7323052b0f"} Jan 23 08:50:02 crc kubenswrapper[4711]: I0123 08:50:02.076475 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-1"] Jan 23 08:50:02 crc kubenswrapper[4711]: I0123 08:50:02.076844 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-1" podUID="9287d315-4783-49d4-92a0-730d372a9a58" containerName="nova-kuttl-cell0-conductor-conductor" containerID="cri-o://c4caecb3a4094896b4afdab10ba38a00b1a5b757ee146e1bc76b917c287788dd" gracePeriod=30 Jan 23 08:50:03 crc kubenswrapper[4711]: E0123 08:50:03.922732 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c4caecb3a4094896b4afdab10ba38a00b1a5b757ee146e1bc76b917c287788dd" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:50:03 crc kubenswrapper[4711]: E0123 08:50:03.924333 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c4caecb3a4094896b4afdab10ba38a00b1a5b757ee146e1bc76b917c287788dd" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:50:03 crc kubenswrapper[4711]: E0123 08:50:03.925850 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c4caecb3a4094896b4afdab10ba38a00b1a5b757ee146e1bc76b917c287788dd" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:50:03 crc kubenswrapper[4711]: E0123 08:50:03.925938 4711 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-1" podUID="9287d315-4783-49d4-92a0-730d372a9a58" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:50:03 crc kubenswrapper[4711]: E0123 08:50:03.932030 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3f08d4b9cce510d1d660f6d531f201cfdab19901a626ddec294eb69e703eb635" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:50:03 crc kubenswrapper[4711]: E0123 08:50:03.934118 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3f08d4b9cce510d1d660f6d531f201cfdab19901a626ddec294eb69e703eb635" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:50:03 crc kubenswrapper[4711]: E0123 08:50:03.936692 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3f08d4b9cce510d1d660f6d531f201cfdab19901a626ddec294eb69e703eb635" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:50:03 crc kubenswrapper[4711]: E0123 08:50:03.936753 4711 
prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-2" podUID="718a4201-2f3d-4471-aecf-f3724cc7ce00" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.120012 4711 generic.go:334] "Generic (PLEG): container finished" podID="1fbd9125-a9d2-4477-a6a9-57a49ac330fb" containerID="dd7823185394cf8222048f4f970b06c13ca0a73efa5a28ad573c6300f438883d" exitCode=0 Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.120081 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-2" event={"ID":"1fbd9125-a9d2-4477-a6a9-57a49ac330fb","Type":"ContainerDied","Data":"dd7823185394cf8222048f4f970b06c13ca0a73efa5a28ad573c6300f438883d"} Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.122971 4711 generic.go:334] "Generic (PLEG): container finished" podID="19be5184-7f14-41b5-88a9-bd6f83eecde5" containerID="cdae0d1e88dde900cae3f58906d96005420b1aec4a36d829ee499d05cfb133d0" exitCode=0 Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.123026 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-1" event={"ID":"19be5184-7f14-41b5-88a9-bd6f83eecde5","Type":"ContainerDied","Data":"cdae0d1e88dde900cae3f58906d96005420b1aec4a36d829ee499d05cfb133d0"} Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.409305 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-2" Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.415815 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-1" Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.522890 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fbd9125-a9d2-4477-a6a9-57a49ac330fb-config-data\") pod \"1fbd9125-a9d2-4477-a6a9-57a49ac330fb\" (UID: \"1fbd9125-a9d2-4477-a6a9-57a49ac330fb\") " Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.522944 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5cc9\" (UniqueName: \"kubernetes.io/projected/19be5184-7f14-41b5-88a9-bd6f83eecde5-kube-api-access-m5cc9\") pod \"19be5184-7f14-41b5-88a9-bd6f83eecde5\" (UID: \"19be5184-7f14-41b5-88a9-bd6f83eecde5\") " Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.522977 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1fbd9125-a9d2-4477-a6a9-57a49ac330fb-logs\") pod \"1fbd9125-a9d2-4477-a6a9-57a49ac330fb\" (UID: \"1fbd9125-a9d2-4477-a6a9-57a49ac330fb\") " Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.523031 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19be5184-7f14-41b5-88a9-bd6f83eecde5-config-data\") pod \"19be5184-7f14-41b5-88a9-bd6f83eecde5\" (UID: \"19be5184-7f14-41b5-88a9-bd6f83eecde5\") " Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.523190 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/19be5184-7f14-41b5-88a9-bd6f83eecde5-logs\") pod \"19be5184-7f14-41b5-88a9-bd6f83eecde5\" (UID: \"19be5184-7f14-41b5-88a9-bd6f83eecde5\") " Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.523231 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgcc2\" (UniqueName: \"kubernetes.io/projected/1fbd9125-a9d2-4477-a6a9-57a49ac330fb-kube-api-access-wgcc2\") pod \"1fbd9125-a9d2-4477-a6a9-57a49ac330fb\" (UID: \"1fbd9125-a9d2-4477-a6a9-57a49ac330fb\") " Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.523990 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fbd9125-a9d2-4477-a6a9-57a49ac330fb-logs" (OuterVolumeSpecName: "logs") pod "1fbd9125-a9d2-4477-a6a9-57a49ac330fb" (UID: "1fbd9125-a9d2-4477-a6a9-57a49ac330fb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.524444 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19be5184-7f14-41b5-88a9-bd6f83eecde5-logs" (OuterVolumeSpecName: "logs") pod "19be5184-7f14-41b5-88a9-bd6f83eecde5" (UID: "19be5184-7f14-41b5-88a9-bd6f83eecde5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.528582 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fbd9125-a9d2-4477-a6a9-57a49ac330fb-kube-api-access-wgcc2" (OuterVolumeSpecName: "kube-api-access-wgcc2") pod "1fbd9125-a9d2-4477-a6a9-57a49ac330fb" (UID: "1fbd9125-a9d2-4477-a6a9-57a49ac330fb"). InnerVolumeSpecName "kube-api-access-wgcc2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.528947 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19be5184-7f14-41b5-88a9-bd6f83eecde5-kube-api-access-m5cc9" (OuterVolumeSpecName: "kube-api-access-m5cc9") pod "19be5184-7f14-41b5-88a9-bd6f83eecde5" (UID: "19be5184-7f14-41b5-88a9-bd6f83eecde5"). InnerVolumeSpecName "kube-api-access-m5cc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.547824 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19be5184-7f14-41b5-88a9-bd6f83eecde5-config-data" (OuterVolumeSpecName: "config-data") pod "19be5184-7f14-41b5-88a9-bd6f83eecde5" (UID: "19be5184-7f14-41b5-88a9-bd6f83eecde5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.550416 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fbd9125-a9d2-4477-a6a9-57a49ac330fb-config-data" (OuterVolumeSpecName: "config-data") pod "1fbd9125-a9d2-4477-a6a9-57a49ac330fb" (UID: "1fbd9125-a9d2-4477-a6a9-57a49ac330fb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.625148 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/19be5184-7f14-41b5-88a9-bd6f83eecde5-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.625874 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgcc2\" (UniqueName: \"kubernetes.io/projected/1fbd9125-a9d2-4477-a6a9-57a49ac330fb-kube-api-access-wgcc2\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.625901 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fbd9125-a9d2-4477-a6a9-57a49ac330fb-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.625916 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5cc9\" (UniqueName: \"kubernetes.io/projected/19be5184-7f14-41b5-88a9-bd6f83eecde5-kube-api-access-m5cc9\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.625928 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1fbd9125-a9d2-4477-a6a9-57a49ac330fb-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:05 crc kubenswrapper[4711]: I0123 08:50:05.625943 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19be5184-7f14-41b5-88a9-bd6f83eecde5-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:06 crc kubenswrapper[4711]: I0123 08:50:06.135154 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-1" Jan 23 08:50:06 crc kubenswrapper[4711]: I0123 08:50:06.135149 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-1" event={"ID":"19be5184-7f14-41b5-88a9-bd6f83eecde5","Type":"ContainerDied","Data":"d30ba0089f7b373a2d9cdcfddbc6167f08d4c263566eecba55ec514929899956"} Jan 23 08:50:06 crc kubenswrapper[4711]: I0123 08:50:06.136281 4711 scope.go:117] "RemoveContainer" containerID="cdae0d1e88dde900cae3f58906d96005420b1aec4a36d829ee499d05cfb133d0" Jan 23 08:50:06 crc kubenswrapper[4711]: I0123 08:50:06.136843 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-2" event={"ID":"1fbd9125-a9d2-4477-a6a9-57a49ac330fb","Type":"ContainerDied","Data":"d74528420aef2d9adcaa9d28e66e546154663e63690f1afdc25e324bf47cb8f3"} Jan 23 08:50:06 crc kubenswrapper[4711]: I0123 08:50:06.136905 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-2" Jan 23 08:50:06 crc kubenswrapper[4711]: I0123 08:50:06.171954 4711 scope.go:117] "RemoveContainer" containerID="0d98c73761f2537fc8736c9972719b9c9a3c8bc1f284388a8359df7323052b0f" Jan 23 08:50:06 crc kubenswrapper[4711]: I0123 08:50:06.193030 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-2"] Jan 23 08:50:06 crc kubenswrapper[4711]: I0123 08:50:06.205697 4711 scope.go:117] "RemoveContainer" containerID="dd7823185394cf8222048f4f970b06c13ca0a73efa5a28ad573c6300f438883d" Jan 23 08:50:06 crc kubenswrapper[4711]: I0123 08:50:06.208242 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-2"] Jan 23 08:50:06 crc kubenswrapper[4711]: I0123 08:50:06.215685 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-1"] Jan 23 08:50:06 crc kubenswrapper[4711]: I0123 08:50:06.229043 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-1"] Jan 23 08:50:06 crc kubenswrapper[4711]: I0123 08:50:06.237811 4711 scope.go:117] "RemoveContainer" containerID="82171870c0cb8141a1c4cae8531bf8810631bc3139039275203e88621bb52722" Jan 23 08:50:07 crc kubenswrapper[4711]: I0123 08:50:07.484979 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19be5184-7f14-41b5-88a9-bd6f83eecde5" path="/var/lib/kubelet/pods/19be5184-7f14-41b5-88a9-bd6f83eecde5/volumes" Jan 23 08:50:07 crc kubenswrapper[4711]: I0123 08:50:07.485610 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fbd9125-a9d2-4477-a6a9-57a49ac330fb" path="/var/lib/kubelet/pods/1fbd9125-a9d2-4477-a6a9-57a49ac330fb/volumes" Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.083342 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-2" Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.162344 4711 generic.go:334] "Generic (PLEG): container finished" podID="718a4201-2f3d-4471-aecf-f3724cc7ce00" containerID="3f08d4b9cce510d1d660f6d531f201cfdab19901a626ddec294eb69e703eb635" exitCode=0 Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.162413 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-2" Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.162427 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-2" event={"ID":"718a4201-2f3d-4471-aecf-f3724cc7ce00","Type":"ContainerDied","Data":"3f08d4b9cce510d1d660f6d531f201cfdab19901a626ddec294eb69e703eb635"} Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.162455 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-2" event={"ID":"718a4201-2f3d-4471-aecf-f3724cc7ce00","Type":"ContainerDied","Data":"f3783030d60c6c913552c6d71ddfdec0ea92bf3510cb11543e42cb01e42f1697"} Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.162472 4711 scope.go:117] "RemoveContainer" containerID="3f08d4b9cce510d1d660f6d531f201cfdab19901a626ddec294eb69e703eb635" Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.165755 4711 generic.go:334] "Generic (PLEG): container finished" podID="9287d315-4783-49d4-92a0-730d372a9a58" containerID="c4caecb3a4094896b4afdab10ba38a00b1a5b757ee146e1bc76b917c287788dd" exitCode=0 Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.165788 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-1" event={"ID":"9287d315-4783-49d4-92a0-730d372a9a58","Type":"ContainerDied","Data":"c4caecb3a4094896b4afdab10ba38a00b1a5b757ee146e1bc76b917c287788dd"} Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.165809 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-1" event={"ID":"9287d315-4783-49d4-92a0-730d372a9a58","Type":"ContainerDied","Data":"b4b25088a6ef454d8e01bca9debc944e579ed5199468617c72ba35364cf8dbc9"} Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.165819 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b4b25088a6ef454d8e01bca9debc944e579ed5199468617c72ba35364cf8dbc9" Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.187582 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-1" Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.205003 4711 scope.go:117] "RemoveContainer" containerID="3f08d4b9cce510d1d660f6d531f201cfdab19901a626ddec294eb69e703eb635" Jan 23 08:50:08 crc kubenswrapper[4711]: E0123 08:50:08.207263 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f08d4b9cce510d1d660f6d531f201cfdab19901a626ddec294eb69e703eb635\": container with ID starting with 3f08d4b9cce510d1d660f6d531f201cfdab19901a626ddec294eb69e703eb635 not found: ID does not exist" containerID="3f08d4b9cce510d1d660f6d531f201cfdab19901a626ddec294eb69e703eb635" Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.207312 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f08d4b9cce510d1d660f6d531f201cfdab19901a626ddec294eb69e703eb635"} err="failed to get container status \"3f08d4b9cce510d1d660f6d531f201cfdab19901a626ddec294eb69e703eb635\": rpc error: code = NotFound desc = could not find container \"3f08d4b9cce510d1d660f6d531f201cfdab19901a626ddec294eb69e703eb635\": container with ID starting with 3f08d4b9cce510d1d660f6d531f201cfdab19901a626ddec294eb69e703eb635 not found: ID does not exist" Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.275110 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/718a4201-2f3d-4471-aecf-f3724cc7ce00-config-data\") pod \"718a4201-2f3d-4471-aecf-f3724cc7ce00\" (UID: \"718a4201-2f3d-4471-aecf-f3724cc7ce00\") " Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.275179 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nnzkp\" (UniqueName: \"kubernetes.io/projected/718a4201-2f3d-4471-aecf-f3724cc7ce00-kube-api-access-nnzkp\") pod \"718a4201-2f3d-4471-aecf-f3724cc7ce00\" (UID: \"718a4201-2f3d-4471-aecf-f3724cc7ce00\") " Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.281463 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/718a4201-2f3d-4471-aecf-f3724cc7ce00-kube-api-access-nnzkp" (OuterVolumeSpecName: "kube-api-access-nnzkp") pod "718a4201-2f3d-4471-aecf-f3724cc7ce00" (UID: "718a4201-2f3d-4471-aecf-f3724cc7ce00"). InnerVolumeSpecName "kube-api-access-nnzkp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.298440 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/718a4201-2f3d-4471-aecf-f3724cc7ce00-config-data" (OuterVolumeSpecName: "config-data") pod "718a4201-2f3d-4471-aecf-f3724cc7ce00" (UID: "718a4201-2f3d-4471-aecf-f3724cc7ce00"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.376562 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9287d315-4783-49d4-92a0-730d372a9a58-config-data\") pod \"9287d315-4783-49d4-92a0-730d372a9a58\" (UID: \"9287d315-4783-49d4-92a0-730d372a9a58\") " Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.376717 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxpxc\" (UniqueName: \"kubernetes.io/projected/9287d315-4783-49d4-92a0-730d372a9a58-kube-api-access-rxpxc\") pod \"9287d315-4783-49d4-92a0-730d372a9a58\" (UID: \"9287d315-4783-49d4-92a0-730d372a9a58\") " Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.377247 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/718a4201-2f3d-4471-aecf-f3724cc7ce00-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.377278 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nnzkp\" (UniqueName: \"kubernetes.io/projected/718a4201-2f3d-4471-aecf-f3724cc7ce00-kube-api-access-nnzkp\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.379972 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9287d315-4783-49d4-92a0-730d372a9a58-kube-api-access-rxpxc" (OuterVolumeSpecName: "kube-api-access-rxpxc") pod "9287d315-4783-49d4-92a0-730d372a9a58" (UID: "9287d315-4783-49d4-92a0-730d372a9a58"). InnerVolumeSpecName "kube-api-access-rxpxc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.398073 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9287d315-4783-49d4-92a0-730d372a9a58-config-data" (OuterVolumeSpecName: "config-data") pod "9287d315-4783-49d4-92a0-730d372a9a58" (UID: "9287d315-4783-49d4-92a0-730d372a9a58"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.478867 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxpxc\" (UniqueName: \"kubernetes.io/projected/9287d315-4783-49d4-92a0-730d372a9a58-kube-api-access-rxpxc\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.478895 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9287d315-4783-49d4-92a0-730d372a9a58-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.498031 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-2"] Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.506108 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-2"] Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.858668 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-2"] Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.858890 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-scheduler-2" podUID="70402d14-4b77-41a4-907d-3fda4e66b7cd" containerName="nova-kuttl-scheduler-scheduler" containerID="cri-o://eb0f5e38d2c74a2f0a67fa0e4369421f4b33874dc85fa37d2309bc3fc6501023" gracePeriod=30 Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.878190 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-1"] Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.878427 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-scheduler-1" podUID="a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf" containerName="nova-kuttl-scheduler-scheduler" containerID="cri-o://51e40f1f2459117feb29255f35b9f5ff3936b16a1dfdcc5ba369c2094c388d45" gracePeriod=30 Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.888899 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-2"] Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.889140 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-2" podUID="fdc81782-4865-40e9-82c2-39a2e65fa1e2" containerName="nova-kuttl-metadata-log" containerID="cri-o://5b1ec1492997fddf3f23df2f78ca7cc3e54eb22383f28666c60d02a638fab645" gracePeriod=30 Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.889255 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-2" podUID="fdc81782-4865-40e9-82c2-39a2e65fa1e2" containerName="nova-kuttl-metadata-metadata" containerID="cri-o://591437a4cd0b764b1ab44e2ca412f7a52c1a6edc8ac8b80e92a1a7372b83023e" gracePeriod=30 Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.900529 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-1"] Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.900795 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-1" podUID="03f2241c-8262-4dfc-9425-8c48fc2ab7e3" containerName="nova-kuttl-metadata-log" containerID="cri-o://0aced10eb8db89b3d6952004e7468ad74e2e73f1e1a429494bddb96459a49d35" gracePeriod=30 Jan 23 08:50:08 crc kubenswrapper[4711]: I0123 08:50:08.900968 
4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-1" podUID="03f2241c-8262-4dfc-9425-8c48fc2ab7e3" containerName="nova-kuttl-metadata-metadata" containerID="cri-o://60df0aa3d262579d6c41f2ae3c9a66e1ab55fa78629a039b81218a364f17f041" gracePeriod=30 Jan 23 08:50:09 crc kubenswrapper[4711]: I0123 08:50:09.174685 4711 generic.go:334] "Generic (PLEG): container finished" podID="fdc81782-4865-40e9-82c2-39a2e65fa1e2" containerID="5b1ec1492997fddf3f23df2f78ca7cc3e54eb22383f28666c60d02a638fab645" exitCode=143 Jan 23 08:50:09 crc kubenswrapper[4711]: I0123 08:50:09.174750 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-2" event={"ID":"fdc81782-4865-40e9-82c2-39a2e65fa1e2","Type":"ContainerDied","Data":"5b1ec1492997fddf3f23df2f78ca7cc3e54eb22383f28666c60d02a638fab645"} Jan 23 08:50:09 crc kubenswrapper[4711]: I0123 08:50:09.177645 4711 generic.go:334] "Generic (PLEG): container finished" podID="03f2241c-8262-4dfc-9425-8c48fc2ab7e3" containerID="0aced10eb8db89b3d6952004e7468ad74e2e73f1e1a429494bddb96459a49d35" exitCode=143 Jan 23 08:50:09 crc kubenswrapper[4711]: I0123 08:50:09.177719 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-1" event={"ID":"03f2241c-8262-4dfc-9425-8c48fc2ab7e3","Type":"ContainerDied","Data":"0aced10eb8db89b3d6952004e7468ad74e2e73f1e1a429494bddb96459a49d35"} Jan 23 08:50:09 crc kubenswrapper[4711]: I0123 08:50:09.177748 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-1" Jan 23 08:50:09 crc kubenswrapper[4711]: I0123 08:50:09.213115 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-1"] Jan 23 08:50:09 crc kubenswrapper[4711]: I0123 08:50:09.224873 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-1"] Jan 23 08:50:09 crc kubenswrapper[4711]: I0123 08:50:09.347143 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-2"] Jan 23 08:50:09 crc kubenswrapper[4711]: I0123 08:50:09.347407 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-2" podUID="e7695748-8afe-456b-bffe-7908726a6ca1" containerName="nova-kuttl-cell1-conductor-conductor" containerID="cri-o://b7e4fd18264161cdf0e0e820d2428400f3d4ca60bb722f703e2e01cdc50959ff" gracePeriod=30 Jan 23 08:50:09 crc kubenswrapper[4711]: I0123 08:50:09.357497 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-1"] Jan 23 08:50:09 crc kubenswrapper[4711]: I0123 08:50:09.357809 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-1" podUID="13a3efe4-fb91-4f25-b266-84cef3bd94b1" containerName="nova-kuttl-cell1-conductor-conductor" containerID="cri-o://06dc3dccdf9391925cc6b2e757a2102f7ac109f205eeeb4f3a00541e99be56e5" gracePeriod=30 Jan 23 08:50:09 crc kubenswrapper[4711]: I0123 08:50:09.484284 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="718a4201-2f3d-4471-aecf-f3724cc7ce00" path="/var/lib/kubelet/pods/718a4201-2f3d-4471-aecf-f3724cc7ce00/volumes" Jan 23 08:50:09 crc kubenswrapper[4711]: I0123 08:50:09.484857 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="9287d315-4783-49d4-92a0-730d372a9a58" path="/var/lib/kubelet/pods/9287d315-4783-49d4-92a0-730d372a9a58/volumes" Jan 23 08:50:10 crc kubenswrapper[4711]: E0123 08:50:10.621857 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="eb0f5e38d2c74a2f0a67fa0e4369421f4b33874dc85fa37d2309bc3fc6501023" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:50:10 crc kubenswrapper[4711]: E0123 08:50:10.623770 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="eb0f5e38d2c74a2f0a67fa0e4369421f4b33874dc85fa37d2309bc3fc6501023" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:50:10 crc kubenswrapper[4711]: E0123 08:50:10.625799 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="eb0f5e38d2c74a2f0a67fa0e4369421f4b33874dc85fa37d2309bc3fc6501023" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:50:10 crc kubenswrapper[4711]: E0123 08:50:10.625873 4711 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-scheduler-2" podUID="70402d14-4b77-41a4-907d-3fda4e66b7cd" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:50:10 crc kubenswrapper[4711]: E0123 08:50:10.638429 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="51e40f1f2459117feb29255f35b9f5ff3936b16a1dfdcc5ba369c2094c388d45" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:50:10 crc kubenswrapper[4711]: E0123 08:50:10.640078 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="51e40f1f2459117feb29255f35b9f5ff3936b16a1dfdcc5ba369c2094c388d45" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:50:10 crc kubenswrapper[4711]: E0123 08:50:10.641169 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="51e40f1f2459117feb29255f35b9f5ff3936b16a1dfdcc5ba369c2094c388d45" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:50:10 crc kubenswrapper[4711]: E0123 08:50:10.641251 4711 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-scheduler-1" podUID="a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.009776 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-metadata-2" podUID="fdc81782-4865-40e9-82c2-39a2e65fa1e2" containerName="nova-kuttl-metadata-log" probeResult="failure" 
output="Get \"http://10.217.0.179:8775/\": read tcp 10.217.0.2:57882->10.217.0.179:8775: read: connection reset by peer" Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.009841 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-metadata-2" podUID="fdc81782-4865-40e9-82c2-39a2e65fa1e2" containerName="nova-kuttl-metadata-metadata" probeResult="failure" output="Get \"http://10.217.0.179:8775/\": read tcp 10.217.0.2:57886->10.217.0.179:8775: read: connection reset by peer" Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.071403 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-metadata-1" podUID="03f2241c-8262-4dfc-9425-8c48fc2ab7e3" containerName="nova-kuttl-metadata-metadata" probeResult="failure" output="Get \"http://10.217.0.178:8775/\": read tcp 10.217.0.2:49432->10.217.0.178:8775: read: connection reset by peer" Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.071439 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-metadata-1" podUID="03f2241c-8262-4dfc-9425-8c48fc2ab7e3" containerName="nova-kuttl-metadata-log" probeResult="failure" output="Get \"http://10.217.0.178:8775/\": read tcp 10.217.0.2:49444->10.217.0.178:8775: read: connection reset by peer" Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.204154 4711 generic.go:334] "Generic (PLEG): container finished" podID="13a3efe4-fb91-4f25-b266-84cef3bd94b1" containerID="06dc3dccdf9391925cc6b2e757a2102f7ac109f205eeeb4f3a00541e99be56e5" exitCode=0 Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.204241 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-1" event={"ID":"13a3efe4-fb91-4f25-b266-84cef3bd94b1","Type":"ContainerDied","Data":"06dc3dccdf9391925cc6b2e757a2102f7ac109f205eeeb4f3a00541e99be56e5"} Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.204289 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-1" event={"ID":"13a3efe4-fb91-4f25-b266-84cef3bd94b1","Type":"ContainerDied","Data":"8d158b3933170abbd92370a3bed62f30ce9d3e1cd2941dce1b9024144bff9ad2"} Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.204306 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d158b3933170abbd92370a3bed62f30ce9d3e1cd2941dce1b9024144bff9ad2" Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.206844 4711 generic.go:334] "Generic (PLEG): container finished" podID="fdc81782-4865-40e9-82c2-39a2e65fa1e2" containerID="591437a4cd0b764b1ab44e2ca412f7a52c1a6edc8ac8b80e92a1a7372b83023e" exitCode=0 Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.206926 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-2" event={"ID":"fdc81782-4865-40e9-82c2-39a2e65fa1e2","Type":"ContainerDied","Data":"591437a4cd0b764b1ab44e2ca412f7a52c1a6edc8ac8b80e92a1a7372b83023e"} Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.208571 4711 generic.go:334] "Generic (PLEG): container finished" podID="e7695748-8afe-456b-bffe-7908726a6ca1" containerID="b7e4fd18264161cdf0e0e820d2428400f3d4ca60bb722f703e2e01cdc50959ff" exitCode=0 Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.208595 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-2" 
event={"ID":"e7695748-8afe-456b-bffe-7908726a6ca1","Type":"ContainerDied","Data":"b7e4fd18264161cdf0e0e820d2428400f3d4ca60bb722f703e2e01cdc50959ff"} Jan 23 08:50:12 crc kubenswrapper[4711]: E0123 08:50:12.375413 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b7e4fd18264161cdf0e0e820d2428400f3d4ca60bb722f703e2e01cdc50959ff is running failed: container process not found" containerID="b7e4fd18264161cdf0e0e820d2428400f3d4ca60bb722f703e2e01cdc50959ff" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:50:12 crc kubenswrapper[4711]: E0123 08:50:12.376499 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b7e4fd18264161cdf0e0e820d2428400f3d4ca60bb722f703e2e01cdc50959ff is running failed: container process not found" containerID="b7e4fd18264161cdf0e0e820d2428400f3d4ca60bb722f703e2e01cdc50959ff" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:50:12 crc kubenswrapper[4711]: E0123 08:50:12.376929 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b7e4fd18264161cdf0e0e820d2428400f3d4ca60bb722f703e2e01cdc50959ff is running failed: container process not found" containerID="b7e4fd18264161cdf0e0e820d2428400f3d4ca60bb722f703e2e01cdc50959ff" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:50:12 crc kubenswrapper[4711]: E0123 08:50:12.377007 4711 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b7e4fd18264161cdf0e0e820d2428400f3d4ca60bb722f703e2e01cdc50959ff is running failed: container process not found" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-2" podUID="e7695748-8afe-456b-bffe-7908726a6ca1" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:50:12 crc kubenswrapper[4711]: E0123 08:50:12.384826 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 06dc3dccdf9391925cc6b2e757a2102f7ac109f205eeeb4f3a00541e99be56e5 is running failed: container process not found" containerID="06dc3dccdf9391925cc6b2e757a2102f7ac109f205eeeb4f3a00541e99be56e5" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:50:12 crc kubenswrapper[4711]: E0123 08:50:12.385488 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 06dc3dccdf9391925cc6b2e757a2102f7ac109f205eeeb4f3a00541e99be56e5 is running failed: container process not found" containerID="06dc3dccdf9391925cc6b2e757a2102f7ac109f205eeeb4f3a00541e99be56e5" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:50:12 crc kubenswrapper[4711]: E0123 08:50:12.386165 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 06dc3dccdf9391925cc6b2e757a2102f7ac109f205eeeb4f3a00541e99be56e5 is running failed: container process not found" containerID="06dc3dccdf9391925cc6b2e757a2102f7ac109f205eeeb4f3a00541e99be56e5" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:50:12 crc kubenswrapper[4711]: E0123 08:50:12.386199 4711 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not 
created or running: checking if PID of 06dc3dccdf9391925cc6b2e757a2102f7ac109f205eeeb4f3a00541e99be56e5 is running failed: container process not found" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-1" podUID="13a3efe4-fb91-4f25-b266-84cef3bd94b1" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.404295 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-1" Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.409174 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-2" Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.548318 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13a3efe4-fb91-4f25-b266-84cef3bd94b1-config-data\") pod \"13a3efe4-fb91-4f25-b266-84cef3bd94b1\" (UID: \"13a3efe4-fb91-4f25-b266-84cef3bd94b1\") " Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.548413 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c752g\" (UniqueName: \"kubernetes.io/projected/e7695748-8afe-456b-bffe-7908726a6ca1-kube-api-access-c752g\") pod \"e7695748-8afe-456b-bffe-7908726a6ca1\" (UID: \"e7695748-8afe-456b-bffe-7908726a6ca1\") " Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.548449 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7695748-8afe-456b-bffe-7908726a6ca1-config-data\") pod \"e7695748-8afe-456b-bffe-7908726a6ca1\" (UID: \"e7695748-8afe-456b-bffe-7908726a6ca1\") " Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.548701 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdjzw\" (UniqueName: \"kubernetes.io/projected/13a3efe4-fb91-4f25-b266-84cef3bd94b1-kube-api-access-jdjzw\") pod \"13a3efe4-fb91-4f25-b266-84cef3bd94b1\" (UID: \"13a3efe4-fb91-4f25-b266-84cef3bd94b1\") " Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.556985 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7695748-8afe-456b-bffe-7908726a6ca1-kube-api-access-c752g" (OuterVolumeSpecName: "kube-api-access-c752g") pod "e7695748-8afe-456b-bffe-7908726a6ca1" (UID: "e7695748-8afe-456b-bffe-7908726a6ca1"). InnerVolumeSpecName "kube-api-access-c752g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.557057 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13a3efe4-fb91-4f25-b266-84cef3bd94b1-kube-api-access-jdjzw" (OuterVolumeSpecName: "kube-api-access-jdjzw") pod "13a3efe4-fb91-4f25-b266-84cef3bd94b1" (UID: "13a3efe4-fb91-4f25-b266-84cef3bd94b1"). InnerVolumeSpecName "kube-api-access-jdjzw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.571062 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7695748-8afe-456b-bffe-7908726a6ca1-config-data" (OuterVolumeSpecName: "config-data") pod "e7695748-8afe-456b-bffe-7908726a6ca1" (UID: "e7695748-8afe-456b-bffe-7908726a6ca1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.582219 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13a3efe4-fb91-4f25-b266-84cef3bd94b1-config-data" (OuterVolumeSpecName: "config-data") pod "13a3efe4-fb91-4f25-b266-84cef3bd94b1" (UID: "13a3efe4-fb91-4f25-b266-84cef3bd94b1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.650954 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jdjzw\" (UniqueName: \"kubernetes.io/projected/13a3efe4-fb91-4f25-b266-84cef3bd94b1-kube-api-access-jdjzw\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.650983 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13a3efe4-fb91-4f25-b266-84cef3bd94b1-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.650992 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c752g\" (UniqueName: \"kubernetes.io/projected/e7695748-8afe-456b-bffe-7908726a6ca1-kube-api-access-c752g\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:12 crc kubenswrapper[4711]: I0123 08:50:12.651000 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7695748-8afe-456b-bffe-7908726a6ca1-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.218731 4711 generic.go:334] "Generic (PLEG): container finished" podID="03f2241c-8262-4dfc-9425-8c48fc2ab7e3" containerID="60df0aa3d262579d6c41f2ae3c9a66e1ab55fa78629a039b81218a364f17f041" exitCode=0 Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.218791 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-1" event={"ID":"03f2241c-8262-4dfc-9425-8c48fc2ab7e3","Type":"ContainerDied","Data":"60df0aa3d262579d6c41f2ae3c9a66e1ab55fa78629a039b81218a364f17f041"} Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.220766 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-2" event={"ID":"e7695748-8afe-456b-bffe-7908726a6ca1","Type":"ContainerDied","Data":"3eb9a849d328e95c270ac399d44ae87a5007dfa9b9ac175c4de1db3e1acb1341"} Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.220794 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-2" Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.220826 4711 scope.go:117] "RemoveContainer" containerID="b7e4fd18264161cdf0e0e820d2428400f3d4ca60bb722f703e2e01cdc50959ff" Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.220951 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-1" Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.253844 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-1"] Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.263495 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-1"] Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.273178 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-2"] Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.289499 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-2"] Jan 23 08:50:13 crc kubenswrapper[4711]: E0123 08:50:13.312705 4711 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode7695748_8afe_456b_bffe_7908726a6ca1.slice\": RecentStats: unable to find data in memory cache]" Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.491442 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13a3efe4-fb91-4f25-b266-84cef3bd94b1" path="/var/lib/kubelet/pods/13a3efe4-fb91-4f25-b266-84cef3bd94b1/volumes" Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.492094 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7695748-8afe-456b-bffe-7908726a6ca1" path="/var/lib/kubelet/pods/e7695748-8afe-456b-bffe-7908726a6ca1/volumes" Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.671564 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-1" Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.677646 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-2" Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.776293 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fdc81782-4865-40e9-82c2-39a2e65fa1e2-logs\") pod \"fdc81782-4865-40e9-82c2-39a2e65fa1e2\" (UID: \"fdc81782-4865-40e9-82c2-39a2e65fa1e2\") " Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.776357 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03f2241c-8262-4dfc-9425-8c48fc2ab7e3-config-data\") pod \"03f2241c-8262-4dfc-9425-8c48fc2ab7e3\" (UID: \"03f2241c-8262-4dfc-9425-8c48fc2ab7e3\") " Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.776468 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdc81782-4865-40e9-82c2-39a2e65fa1e2-config-data\") pod \"fdc81782-4865-40e9-82c2-39a2e65fa1e2\" (UID: \"fdc81782-4865-40e9-82c2-39a2e65fa1e2\") " Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.776519 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03f2241c-8262-4dfc-9425-8c48fc2ab7e3-logs\") pod \"03f2241c-8262-4dfc-9425-8c48fc2ab7e3\" (UID: \"03f2241c-8262-4dfc-9425-8c48fc2ab7e3\") " Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.776589 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mz4bg\" (UniqueName: \"kubernetes.io/projected/03f2241c-8262-4dfc-9425-8c48fc2ab7e3-kube-api-access-mz4bg\") pod \"03f2241c-8262-4dfc-9425-8c48fc2ab7e3\" (UID: \"03f2241c-8262-4dfc-9425-8c48fc2ab7e3\") " Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.776661 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxct6\" (UniqueName: \"kubernetes.io/projected/fdc81782-4865-40e9-82c2-39a2e65fa1e2-kube-api-access-jxct6\") pod \"fdc81782-4865-40e9-82c2-39a2e65fa1e2\" (UID: \"fdc81782-4865-40e9-82c2-39a2e65fa1e2\") " Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.776991 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fdc81782-4865-40e9-82c2-39a2e65fa1e2-logs" (OuterVolumeSpecName: "logs") pod "fdc81782-4865-40e9-82c2-39a2e65fa1e2" (UID: "fdc81782-4865-40e9-82c2-39a2e65fa1e2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.777698 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fdc81782-4865-40e9-82c2-39a2e65fa1e2-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.777906 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03f2241c-8262-4dfc-9425-8c48fc2ab7e3-logs" (OuterVolumeSpecName: "logs") pod "03f2241c-8262-4dfc-9425-8c48fc2ab7e3" (UID: "03f2241c-8262-4dfc-9425-8c48fc2ab7e3"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.781761 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03f2241c-8262-4dfc-9425-8c48fc2ab7e3-kube-api-access-mz4bg" (OuterVolumeSpecName: "kube-api-access-mz4bg") pod "03f2241c-8262-4dfc-9425-8c48fc2ab7e3" (UID: "03f2241c-8262-4dfc-9425-8c48fc2ab7e3"). InnerVolumeSpecName "kube-api-access-mz4bg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.781813 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdc81782-4865-40e9-82c2-39a2e65fa1e2-kube-api-access-jxct6" (OuterVolumeSpecName: "kube-api-access-jxct6") pod "fdc81782-4865-40e9-82c2-39a2e65fa1e2" (UID: "fdc81782-4865-40e9-82c2-39a2e65fa1e2"). InnerVolumeSpecName "kube-api-access-jxct6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.820073 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdc81782-4865-40e9-82c2-39a2e65fa1e2-config-data" (OuterVolumeSpecName: "config-data") pod "fdc81782-4865-40e9-82c2-39a2e65fa1e2" (UID: "fdc81782-4865-40e9-82c2-39a2e65fa1e2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.820076 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03f2241c-8262-4dfc-9425-8c48fc2ab7e3-config-data" (OuterVolumeSpecName: "config-data") pod "03f2241c-8262-4dfc-9425-8c48fc2ab7e3" (UID: "03f2241c-8262-4dfc-9425-8c48fc2ab7e3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.878809 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mz4bg\" (UniqueName: \"kubernetes.io/projected/03f2241c-8262-4dfc-9425-8c48fc2ab7e3-kube-api-access-mz4bg\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.878852 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxct6\" (UniqueName: \"kubernetes.io/projected/fdc81782-4865-40e9-82c2-39a2e65fa1e2-kube-api-access-jxct6\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.878868 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03f2241c-8262-4dfc-9425-8c48fc2ab7e3-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.878882 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdc81782-4865-40e9-82c2-39a2e65fa1e2-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:13 crc kubenswrapper[4711]: I0123 08:50:13.878894 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03f2241c-8262-4dfc-9425-8c48fc2ab7e3-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.180641 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-2" Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.234438 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-1" Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.235284 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-1" event={"ID":"03f2241c-8262-4dfc-9425-8c48fc2ab7e3","Type":"ContainerDied","Data":"4edba40e45bdffb13f14b99422197614aa2c010e8398466a6fced331796f82f9"} Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.235336 4711 scope.go:117] "RemoveContainer" containerID="60df0aa3d262579d6c41f2ae3c9a66e1ab55fa78629a039b81218a364f17f041" Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.242714 4711 generic.go:334] "Generic (PLEG): container finished" podID="70402d14-4b77-41a4-907d-3fda4e66b7cd" containerID="eb0f5e38d2c74a2f0a67fa0e4369421f4b33874dc85fa37d2309bc3fc6501023" exitCode=0 Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.242833 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-2" Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.243487 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-2" event={"ID":"70402d14-4b77-41a4-907d-3fda4e66b7cd","Type":"ContainerDied","Data":"eb0f5e38d2c74a2f0a67fa0e4369421f4b33874dc85fa37d2309bc3fc6501023"} Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.243536 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-2" event={"ID":"70402d14-4b77-41a4-907d-3fda4e66b7cd","Type":"ContainerDied","Data":"a4a4fb741a20b5492d9d245e9fff97af9fbbbbd535c951b12ebe23c37f8e99fd"} Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.246439 4711 generic.go:334] "Generic (PLEG): container finished" podID="a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf" containerID="51e40f1f2459117feb29255f35b9f5ff3936b16a1dfdcc5ba369c2094c388d45" exitCode=0 Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.246483 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-1" event={"ID":"a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf","Type":"ContainerDied","Data":"51e40f1f2459117feb29255f35b9f5ff3936b16a1dfdcc5ba369c2094c388d45"} Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.248790 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-2" event={"ID":"fdc81782-4865-40e9-82c2-39a2e65fa1e2","Type":"ContainerDied","Data":"eb9f650e6c3d0da4dd7c34e6731d2dc6b2eb42f3ef9759f6393d5cd21c169bd3"} Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.248901 4711 util.go:48] "No ready sandbox for pod can be found. 
Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.268984 4711 scope.go:117] "RemoveContainer" containerID="0aced10eb8db89b3d6952004e7468ad74e2e73f1e1a429494bddb96459a49d35"
Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.273997 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-1"]
Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.287770 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-1"]
Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.294840 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70402d14-4b77-41a4-907d-3fda4e66b7cd-config-data\") pod \"70402d14-4b77-41a4-907d-3fda4e66b7cd\" (UID: \"70402d14-4b77-41a4-907d-3fda4e66b7cd\") "
Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.294952 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bzfcn\" (UniqueName: \"kubernetes.io/projected/70402d14-4b77-41a4-907d-3fda4e66b7cd-kube-api-access-bzfcn\") pod \"70402d14-4b77-41a4-907d-3fda4e66b7cd\" (UID: \"70402d14-4b77-41a4-907d-3fda4e66b7cd\") "
Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.297777 4711 scope.go:117] "RemoveContainer" containerID="eb0f5e38d2c74a2f0a67fa0e4369421f4b33874dc85fa37d2309bc3fc6501023"
Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.299560 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70402d14-4b77-41a4-907d-3fda4e66b7cd-kube-api-access-bzfcn" (OuterVolumeSpecName: "kube-api-access-bzfcn") pod "70402d14-4b77-41a4-907d-3fda4e66b7cd" (UID: "70402d14-4b77-41a4-907d-3fda4e66b7cd"). InnerVolumeSpecName "kube-api-access-bzfcn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.301723 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-2"]
Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.307958 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-2"]
Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.314197 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70402d14-4b77-41a4-907d-3fda4e66b7cd-config-data" (OuterVolumeSpecName: "config-data") pod "70402d14-4b77-41a4-907d-3fda4e66b7cd" (UID: "70402d14-4b77-41a4-907d-3fda4e66b7cd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.397712 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70402d14-4b77-41a4-907d-3fda4e66b7cd-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.397763 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bzfcn\" (UniqueName: \"kubernetes.io/projected/70402d14-4b77-41a4-907d-3fda4e66b7cd-kube-api-access-bzfcn\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.410329 4711 scope.go:117] "RemoveContainer" containerID="eb0f5e38d2c74a2f0a67fa0e4369421f4b33874dc85fa37d2309bc3fc6501023" Jan 23 08:50:14 crc kubenswrapper[4711]: E0123 08:50:14.410758 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb0f5e38d2c74a2f0a67fa0e4369421f4b33874dc85fa37d2309bc3fc6501023\": container with ID starting with eb0f5e38d2c74a2f0a67fa0e4369421f4b33874dc85fa37d2309bc3fc6501023 not found: ID does not exist" containerID="eb0f5e38d2c74a2f0a67fa0e4369421f4b33874dc85fa37d2309bc3fc6501023" Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.410788 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb0f5e38d2c74a2f0a67fa0e4369421f4b33874dc85fa37d2309bc3fc6501023"} err="failed to get container status \"eb0f5e38d2c74a2f0a67fa0e4369421f4b33874dc85fa37d2309bc3fc6501023\": rpc error: code = NotFound desc = could not find container \"eb0f5e38d2c74a2f0a67fa0e4369421f4b33874dc85fa37d2309bc3fc6501023\": container with ID starting with eb0f5e38d2c74a2f0a67fa0e4369421f4b33874dc85fa37d2309bc3fc6501023 not found: ID does not exist" Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.410809 4711 scope.go:117] "RemoveContainer" containerID="591437a4cd0b764b1ab44e2ca412f7a52c1a6edc8ac8b80e92a1a7372b83023e" Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.426870 4711 scope.go:117] "RemoveContainer" containerID="5b1ec1492997fddf3f23df2f78ca7cc3e54eb22383f28666c60d02a638fab645" Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.474421 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:50:14 crc kubenswrapper[4711]: E0123 08:50:14.474896 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.578387 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-2"] Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.589619 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-2"] Jan 23 08:50:14 crc kubenswrapper[4711]: I0123 08:50:14.986992 4711 util.go:48] "No ready sandbox for pod can be found. 
Jan 23 08:50:15 crc kubenswrapper[4711]: I0123 08:50:15.107626 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf-config-data\") pod \"a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf\" (UID: \"a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf\") "
Jan 23 08:50:15 crc kubenswrapper[4711]: I0123 08:50:15.108076 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdf6\" (UniqueName: \"kubernetes.io/projected/a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf-kube-api-access-zgdf6\") pod \"a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf\" (UID: \"a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf\") "
Jan 23 08:50:15 crc kubenswrapper[4711]: I0123 08:50:15.111762 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf-kube-api-access-zgdf6" (OuterVolumeSpecName: "kube-api-access-zgdf6") pod "a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf" (UID: "a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf"). InnerVolumeSpecName "kube-api-access-zgdf6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:50:15 crc kubenswrapper[4711]: I0123 08:50:15.135735 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf-config-data" (OuterVolumeSpecName: "config-data") pod "a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf" (UID: "a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:50:15 crc kubenswrapper[4711]: I0123 08:50:15.210135 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdf6\" (UniqueName: \"kubernetes.io/projected/a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf-kube-api-access-zgdf6\") on node \"crc\" DevicePath \"\""
Jan 23 08:50:15 crc kubenswrapper[4711]: I0123 08:50:15.210189 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf-config-data\") on node \"crc\" DevicePath \"\""
Jan 23 08:50:15 crc kubenswrapper[4711]: I0123 08:50:15.269068 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-1" event={"ID":"a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf","Type":"ContainerDied","Data":"5589010028ba384fd3cda7b3f3883b7cc564216d70e3522531a2431325ce2e25"}
Jan 23 08:50:15 crc kubenswrapper[4711]: I0123 08:50:15.269157 4711 scope.go:117] "RemoveContainer" containerID="51e40f1f2459117feb29255f35b9f5ff3936b16a1dfdcc5ba369c2094c388d45"
Jan 23 08:50:15 crc kubenswrapper[4711]: I0123 08:50:15.269229 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-1"
Jan 23 08:50:15 crc kubenswrapper[4711]: I0123 08:50:15.322322 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-1"]
Jan 23 08:50:15 crc kubenswrapper[4711]: I0123 08:50:15.330764 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-1"]
Jan 23 08:50:15 crc kubenswrapper[4711]: I0123 08:50:15.484126 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03f2241c-8262-4dfc-9425-8c48fc2ab7e3" path="/var/lib/kubelet/pods/03f2241c-8262-4dfc-9425-8c48fc2ab7e3/volumes"
Jan 23 08:50:15 crc kubenswrapper[4711]: I0123 08:50:15.485284 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70402d14-4b77-41a4-907d-3fda4e66b7cd" path="/var/lib/kubelet/pods/70402d14-4b77-41a4-907d-3fda4e66b7cd/volumes"
Jan 23 08:50:15 crc kubenswrapper[4711]: I0123 08:50:15.485889 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf" path="/var/lib/kubelet/pods/a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf/volumes"
Jan 23 08:50:15 crc kubenswrapper[4711]: I0123 08:50:15.487057 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdc81782-4865-40e9-82c2-39a2e65fa1e2" path="/var/lib/kubelet/pods/fdc81782-4865-40e9-82c2-39a2e65fa1e2/volumes"
Jan 23 08:50:19 crc kubenswrapper[4711]: I0123 08:50:19.425996 4711 scope.go:117] "RemoveContainer" containerID="8ab9761500237919aa5f88c2639738861b193c4e8389e0149265fecc70bd84a0"
Jan 23 08:50:19 crc kubenswrapper[4711]: I0123 08:50:19.454679 4711 scope.go:117] "RemoveContainer" containerID="12aad7d7e3601df2d1031155d252b192ee7b6a5c8a6fad4c4094ade950e47290"
Jan 23 08:50:19 crc kubenswrapper[4711]: I0123 08:50:19.478646 4711 scope.go:117] "RemoveContainer" containerID="b3f7b13c724fc52560795c7721a240709dc048102991934e8ec91a967703dbb0"
Jan 23 08:50:19 crc kubenswrapper[4711]: I0123 08:50:19.511052 4711 scope.go:117] "RemoveContainer" containerID="c568d53916b5f2a2e500cb3f258a5f005a235f7d03b21e197e50e65b6324242f"
Jan 23 08:50:19 crc kubenswrapper[4711]: I0123 08:50:19.545228 4711 scope.go:117] "RemoveContainer" containerID="e854188d28149236b862fc1b15d0c857f169663ac94ebaae065f1a2acf040996"
Jan 23 08:50:19 crc kubenswrapper[4711]: I0123 08:50:19.578940 4711 scope.go:117] "RemoveContainer" containerID="89a7362b668453fd9e6f188d304eb3dbae1158f3c031a79cb0e2cface0ee1e17"
Jan 23 08:50:19 crc kubenswrapper[4711]: I0123 08:50:19.633031 4711 scope.go:117] "RemoveContainer" containerID="6c74a71566fb01e4966e4dffe98c410b6d6c8cdcc562b1d9176018d078548949"
Jan 23 08:50:19 crc kubenswrapper[4711]: I0123 08:50:19.648564 4711 scope.go:117] "RemoveContainer" containerID="281a5f793f8779075584aea6071d8d5b8b5eaca314ee5932b5a49611e614de7f"
Jan 23 08:50:19 crc kubenswrapper[4711]: I0123 08:50:19.668584 4711 scope.go:117] "RemoveContainer" containerID="6c07aa742c175924fadf2c34a07aa6a32bb93df07ea8b6b5a2d4bc8266bfaea7"
Jan 23 08:50:19 crc kubenswrapper[4711]: I0123 08:50:19.688013 4711 scope.go:117] "RemoveContainer" containerID="b639ef42c087042ed6a0a326c87278d2598797e4fe2cca8142d6af4508176d31"
Jan 23 08:50:19 crc kubenswrapper[4711]: I0123 08:50:19.703852 4711 scope.go:117] "RemoveContainer" containerID="ca0537f0e82aae5a9122cafb35004a42a6d9435e0a8430695366b0d9e42d63b3"
Jan 23 08:50:28 crc kubenswrapper[4711]: I0123 08:50:28.473921 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058"
Jan 23 08:50:28 crc kubenswrapper[4711]: E0123 08:50:28.474887 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736"
Jan 23 08:50:28 crc kubenswrapper[4711]: I0123 08:50:28.901548 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"]
Jan 23 08:50:28 crc kubenswrapper[4711]: I0123 08:50:28.901765 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="bcab875b-6101-462d-a763-1aa2441eecd6" containerName="nova-kuttl-api-log" containerID="cri-o://1b91e267f2dc3b6a04c3f66b4de9dfb23661fc3fbcc6b67c41f4c08fbd53e8a0" gracePeriod=30
Jan 23 08:50:28 crc kubenswrapper[4711]: I0123 08:50:28.901885 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="bcab875b-6101-462d-a763-1aa2441eecd6" containerName="nova-kuttl-api-api" containerID="cri-o://37a90164d812a813de4abbb7b8c39b62747cc52ca514de6e49b5317c37b58073" gracePeriod=30
Jan 23 08:50:29 crc kubenswrapper[4711]: I0123 08:50:29.203314 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"]
Jan 23 08:50:29 crc kubenswrapper[4711]: I0123 08:50:29.203538 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" podUID="fc11dc78-b7be-4509-818f-0d4cb6c97931" containerName="nova-kuttl-cell0-conductor-conductor" containerID="cri-o://d5685990e14dd02746739b6c05bb8ba6bacfb4bccd1022f77cb85f74c0e281b3" gracePeriod=30
Jan 23 08:50:29 crc kubenswrapper[4711]: I0123 08:50:29.391705 4711 generic.go:334] "Generic (PLEG): container finished" podID="bcab875b-6101-462d-a763-1aa2441eecd6" containerID="1b91e267f2dc3b6a04c3f66b4de9dfb23661fc3fbcc6b67c41f4c08fbd53e8a0" exitCode=143
Jan 23 08:50:29 crc kubenswrapper[4711]: I0123 08:50:29.391758 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"bcab875b-6101-462d-a763-1aa2441eecd6","Type":"ContainerDied","Data":"1b91e267f2dc3b6a04c3f66b4de9dfb23661fc3fbcc6b67c41f4c08fbd53e8a0"}
Jan 23 08:50:30 crc kubenswrapper[4711]: I0123 08:50:30.041220 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/root-account-create-update-ckwk4"]
Jan 23 08:50:30 crc kubenswrapper[4711]: I0123 08:50:30.047930 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/root-account-create-update-ckwk4"]
Jan 23 08:50:30 crc kubenswrapper[4711]: I0123 08:50:30.301341 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0"
Jan 23 08:50:30 crc kubenswrapper[4711]: I0123 08:50:30.360347 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc11dc78-b7be-4509-818f-0d4cb6c97931-config-data\") pod \"fc11dc78-b7be-4509-818f-0d4cb6c97931\" (UID: \"fc11dc78-b7be-4509-818f-0d4cb6c97931\") "
Jan 23 08:50:30 crc kubenswrapper[4711]: I0123 08:50:30.360470 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kgk9n\" (UniqueName: \"kubernetes.io/projected/fc11dc78-b7be-4509-818f-0d4cb6c97931-kube-api-access-kgk9n\") pod \"fc11dc78-b7be-4509-818f-0d4cb6c97931\" (UID: \"fc11dc78-b7be-4509-818f-0d4cb6c97931\") "
Jan 23 08:50:30 crc kubenswrapper[4711]: I0123 08:50:30.368721 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc11dc78-b7be-4509-818f-0d4cb6c97931-kube-api-access-kgk9n" (OuterVolumeSpecName: "kube-api-access-kgk9n") pod "fc11dc78-b7be-4509-818f-0d4cb6c97931" (UID: "fc11dc78-b7be-4509-818f-0d4cb6c97931"). InnerVolumeSpecName "kube-api-access-kgk9n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:50:30 crc kubenswrapper[4711]: I0123 08:50:30.382180 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc11dc78-b7be-4509-818f-0d4cb6c97931-config-data" (OuterVolumeSpecName: "config-data") pod "fc11dc78-b7be-4509-818f-0d4cb6c97931" (UID: "fc11dc78-b7be-4509-818f-0d4cb6c97931"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:50:30 crc kubenswrapper[4711]: I0123 08:50:30.400649 4711 generic.go:334] "Generic (PLEG): container finished" podID="fc11dc78-b7be-4509-818f-0d4cb6c97931" containerID="d5685990e14dd02746739b6c05bb8ba6bacfb4bccd1022f77cb85f74c0e281b3" exitCode=0
Jan 23 08:50:30 crc kubenswrapper[4711]: I0123 08:50:30.400694 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" event={"ID":"fc11dc78-b7be-4509-818f-0d4cb6c97931","Type":"ContainerDied","Data":"d5685990e14dd02746739b6c05bb8ba6bacfb4bccd1022f77cb85f74c0e281b3"}
Jan 23 08:50:30 crc kubenswrapper[4711]: I0123 08:50:30.400723 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" event={"ID":"fc11dc78-b7be-4509-818f-0d4cb6c97931","Type":"ContainerDied","Data":"93baa213b8cc19096e00b6bcb0d159dcb4fcf8c8f7453e7a9e138dfb5664842c"}
Jan 23 08:50:30 crc kubenswrapper[4711]: I0123 08:50:30.400740 4711 scope.go:117] "RemoveContainer" containerID="d5685990e14dd02746739b6c05bb8ba6bacfb4bccd1022f77cb85f74c0e281b3"
Jan 23 08:50:30 crc kubenswrapper[4711]: I0123 08:50:30.400863 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0"
Jan 23 08:50:30 crc kubenswrapper[4711]: I0123 08:50:30.447132 4711 scope.go:117] "RemoveContainer" containerID="d5685990e14dd02746739b6c05bb8ba6bacfb4bccd1022f77cb85f74c0e281b3"
Jan 23 08:50:30 crc kubenswrapper[4711]: E0123 08:50:30.448129 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5685990e14dd02746739b6c05bb8ba6bacfb4bccd1022f77cb85f74c0e281b3\": container with ID starting with d5685990e14dd02746739b6c05bb8ba6bacfb4bccd1022f77cb85f74c0e281b3 not found: ID does not exist" containerID="d5685990e14dd02746739b6c05bb8ba6bacfb4bccd1022f77cb85f74c0e281b3"
Jan 23 08:50:30 crc kubenswrapper[4711]: I0123 08:50:30.448174 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5685990e14dd02746739b6c05bb8ba6bacfb4bccd1022f77cb85f74c0e281b3"} err="failed to get container status \"d5685990e14dd02746739b6c05bb8ba6bacfb4bccd1022f77cb85f74c0e281b3\": rpc error: code = NotFound desc = could not find container \"d5685990e14dd02746739b6c05bb8ba6bacfb4bccd1022f77cb85f74c0e281b3\": container with ID starting with d5685990e14dd02746739b6c05bb8ba6bacfb4bccd1022f77cb85f74c0e281b3 not found: ID does not exist"
Jan 23 08:50:30 crc kubenswrapper[4711]: I0123 08:50:30.451358 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"]
Jan 23 08:50:30 crc kubenswrapper[4711]: I0123 08:50:30.458658 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"]
Jan 23 08:50:30 crc kubenswrapper[4711]: I0123 08:50:30.462296 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kgk9n\" (UniqueName: \"kubernetes.io/projected/fc11dc78-b7be-4509-818f-0d4cb6c97931-kube-api-access-kgk9n\") on node \"crc\" DevicePath \"\""
Jan 23 08:50:30 crc kubenswrapper[4711]: I0123 08:50:30.462397 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc11dc78-b7be-4509-818f-0d4cb6c97931-config-data\") on node \"crc\" DevicePath \"\""
Jan 23 08:50:31 crc kubenswrapper[4711]: I0123 08:50:31.483813 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aed12545-6ee6-4109-ba42-d89dc6b7939a" path="/var/lib/kubelet/pods/aed12545-6ee6-4109-ba42-d89dc6b7939a/volumes"
Jan 23 08:50:31 crc kubenswrapper[4711]: I0123 08:50:31.484903 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc11dc78-b7be-4509-818f-0d4cb6c97931" path="/var/lib/kubelet/pods/fc11dc78-b7be-4509-818f-0d4cb6c97931/volumes"
Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.106402 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"]
Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.106649 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podUID="1e26ab32-cb9b-45fc-856c-f58e6742bb74" containerName="nova-kuttl-scheduler-scheduler" containerID="cri-o://2f77028668c79eb7edaba655236b213ccb507a10e33d6615b337f124769ca974" gracePeriod=30
Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.180665 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"]
Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.180888 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="74b5783d-fd94-4ad1-b7d6-23a1c223b37d" containerName="nova-kuttl-metadata-log" containerID="cri-o://be6268be31b40071e19e801a106475caadd229d1e2192d7b53fed596688a3ff5" gracePeriod=30
pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="74b5783d-fd94-4ad1-b7d6-23a1c223b37d" containerName="nova-kuttl-metadata-log" containerID="cri-o://be6268be31b40071e19e801a106475caadd229d1e2192d7b53fed596688a3ff5" gracePeriod=30 Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.181333 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="74b5783d-fd94-4ad1-b7d6-23a1c223b37d" containerName="nova-kuttl-metadata-metadata" containerID="cri-o://b5fbe5564ccf547d4add01ced66a73b11f6c83486f8866a46a68cd990b10f895" gracePeriod=30 Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.337467 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.395064 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8zn98\" (UniqueName: \"kubernetes.io/projected/bcab875b-6101-462d-a763-1aa2441eecd6-kube-api-access-8zn98\") pod \"bcab875b-6101-462d-a763-1aa2441eecd6\" (UID: \"bcab875b-6101-462d-a763-1aa2441eecd6\") " Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.395189 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcab875b-6101-462d-a763-1aa2441eecd6-config-data\") pod \"bcab875b-6101-462d-a763-1aa2441eecd6\" (UID: \"bcab875b-6101-462d-a763-1aa2441eecd6\") " Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.395796 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bcab875b-6101-462d-a763-1aa2441eecd6-logs\") pod \"bcab875b-6101-462d-a763-1aa2441eecd6\" (UID: \"bcab875b-6101-462d-a763-1aa2441eecd6\") " Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.396587 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bcab875b-6101-462d-a763-1aa2441eecd6-logs" (OuterVolumeSpecName: "logs") pod "bcab875b-6101-462d-a763-1aa2441eecd6" (UID: "bcab875b-6101-462d-a763-1aa2441eecd6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.399840 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcab875b-6101-462d-a763-1aa2441eecd6-kube-api-access-8zn98" (OuterVolumeSpecName: "kube-api-access-8zn98") pod "bcab875b-6101-462d-a763-1aa2441eecd6" (UID: "bcab875b-6101-462d-a763-1aa2441eecd6"). InnerVolumeSpecName "kube-api-access-8zn98". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.417475 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcab875b-6101-462d-a763-1aa2441eecd6-config-data" (OuterVolumeSpecName: "config-data") pod "bcab875b-6101-462d-a763-1aa2441eecd6" (UID: "bcab875b-6101-462d-a763-1aa2441eecd6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.426775 4711 generic.go:334] "Generic (PLEG): container finished" podID="bcab875b-6101-462d-a763-1aa2441eecd6" containerID="37a90164d812a813de4abbb7b8c39b62747cc52ca514de6e49b5317c37b58073" exitCode=0 Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.426910 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"bcab875b-6101-462d-a763-1aa2441eecd6","Type":"ContainerDied","Data":"37a90164d812a813de4abbb7b8c39b62747cc52ca514de6e49b5317c37b58073"} Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.426953 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"bcab875b-6101-462d-a763-1aa2441eecd6","Type":"ContainerDied","Data":"c90d3cbf54b43f3a74ece0460d33a0bcd97b9263e34709ef3fa896b44fbabdb6"} Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.426975 4711 scope.go:117] "RemoveContainer" containerID="37a90164d812a813de4abbb7b8c39b62747cc52ca514de6e49b5317c37b58073" Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.427113 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.433217 4711 generic.go:334] "Generic (PLEG): container finished" podID="74b5783d-fd94-4ad1-b7d6-23a1c223b37d" containerID="be6268be31b40071e19e801a106475caadd229d1e2192d7b53fed596688a3ff5" exitCode=143 Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.433244 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"74b5783d-fd94-4ad1-b7d6-23a1c223b37d","Type":"ContainerDied","Data":"be6268be31b40071e19e801a106475caadd229d1e2192d7b53fed596688a3ff5"} Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.466523 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.466753 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" podUID="fd86e220-6b63-49cd-bd8c-58a22c39ba68" containerName="nova-kuttl-cell1-conductor-conductor" containerID="cri-o://eeebacbb91ec4f572f65d66727699439fcf5389db8187a13a3a909c9c4f56f17" gracePeriod=30 Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.484348 4711 scope.go:117] "RemoveContainer" containerID="1b91e267f2dc3b6a04c3f66b4de9dfb23661fc3fbcc6b67c41f4c08fbd53e8a0" Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.491640 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.497217 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.497745 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8zn98\" (UniqueName: \"kubernetes.io/projected/bcab875b-6101-462d-a763-1aa2441eecd6-kube-api-access-8zn98\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.497773 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcab875b-6101-462d-a763-1aa2441eecd6-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.497784 4711 
Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.507255 4711 scope.go:117] "RemoveContainer" containerID="37a90164d812a813de4abbb7b8c39b62747cc52ca514de6e49b5317c37b58073"
Jan 23 08:50:32 crc kubenswrapper[4711]: E0123 08:50:32.507940 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37a90164d812a813de4abbb7b8c39b62747cc52ca514de6e49b5317c37b58073\": container with ID starting with 37a90164d812a813de4abbb7b8c39b62747cc52ca514de6e49b5317c37b58073 not found: ID does not exist" containerID="37a90164d812a813de4abbb7b8c39b62747cc52ca514de6e49b5317c37b58073"
Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.508045 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37a90164d812a813de4abbb7b8c39b62747cc52ca514de6e49b5317c37b58073"} err="failed to get container status \"37a90164d812a813de4abbb7b8c39b62747cc52ca514de6e49b5317c37b58073\": rpc error: code = NotFound desc = could not find container \"37a90164d812a813de4abbb7b8c39b62747cc52ca514de6e49b5317c37b58073\": container with ID starting with 37a90164d812a813de4abbb7b8c39b62747cc52ca514de6e49b5317c37b58073 not found: ID does not exist"
Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.508077 4711 scope.go:117] "RemoveContainer" containerID="1b91e267f2dc3b6a04c3f66b4de9dfb23661fc3fbcc6b67c41f4c08fbd53e8a0"
Jan 23 08:50:32 crc kubenswrapper[4711]: E0123 08:50:32.508934 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b91e267f2dc3b6a04c3f66b4de9dfb23661fc3fbcc6b67c41f4c08fbd53e8a0\": container with ID starting with 1b91e267f2dc3b6a04c3f66b4de9dfb23661fc3fbcc6b67c41f4c08fbd53e8a0 not found: ID does not exist" containerID="1b91e267f2dc3b6a04c3f66b4de9dfb23661fc3fbcc6b67c41f4c08fbd53e8a0"
Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.508969 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b91e267f2dc3b6a04c3f66b4de9dfb23661fc3fbcc6b67c41f4c08fbd53e8a0"} err="failed to get container status \"1b91e267f2dc3b6a04c3f66b4de9dfb23661fc3fbcc6b67c41f4c08fbd53e8a0\": rpc error: code = NotFound desc = could not find container \"1b91e267f2dc3b6a04c3f66b4de9dfb23661fc3fbcc6b67c41f4c08fbd53e8a0\": container with ID starting with 1b91e267f2dc3b6a04c3f66b4de9dfb23661fc3fbcc6b67c41f4c08fbd53e8a0 not found: ID does not exist"
Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.886693 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs"]
Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.898167 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4z6gs"]
Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.907026 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt"]
Jan 23 08:50:32 crc kubenswrapper[4711]: I0123 08:50:32.917201 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-mapping-648xt"]
Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.090121 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q"]
pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q"] Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.103929 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-7pd7q"] Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.147260 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/novaapic035-account-delete-prh8m"] Jan 23 08:50:33 crc kubenswrapper[4711]: E0123 08:50:33.148881 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="718a4201-2f3d-4471-aecf-f3724cc7ce00" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.148920 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="718a4201-2f3d-4471-aecf-f3724cc7ce00" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:50:33 crc kubenswrapper[4711]: E0123 08:50:33.148941 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fbd9125-a9d2-4477-a6a9-57a49ac330fb" containerName="nova-kuttl-api-api" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.148949 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fbd9125-a9d2-4477-a6a9-57a49ac330fb" containerName="nova-kuttl-api-api" Jan 23 08:50:33 crc kubenswrapper[4711]: E0123 08:50:33.148959 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70402d14-4b77-41a4-907d-3fda4e66b7cd" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.148966 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="70402d14-4b77-41a4-907d-3fda4e66b7cd" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:50:33 crc kubenswrapper[4711]: E0123 08:50:33.148976 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9287d315-4783-49d4-92a0-730d372a9a58" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.148983 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9287d315-4783-49d4-92a0-730d372a9a58" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:50:33 crc kubenswrapper[4711]: E0123 08:50:33.148993 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcab875b-6101-462d-a763-1aa2441eecd6" containerName="nova-kuttl-api-log" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.150473 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcab875b-6101-462d-a763-1aa2441eecd6" containerName="nova-kuttl-api-log" Jan 23 08:50:33 crc kubenswrapper[4711]: E0123 08:50:33.150500 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19be5184-7f14-41b5-88a9-bd6f83eecde5" containerName="nova-kuttl-api-log" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.150525 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="19be5184-7f14-41b5-88a9-bd6f83eecde5" containerName="nova-kuttl-api-log" Jan 23 08:50:33 crc kubenswrapper[4711]: E0123 08:50:33.150547 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdc81782-4865-40e9-82c2-39a2e65fa1e2" containerName="nova-kuttl-metadata-metadata" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.150555 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdc81782-4865-40e9-82c2-39a2e65fa1e2" containerName="nova-kuttl-metadata-metadata" Jan 23 08:50:33 crc kubenswrapper[4711]: E0123 08:50:33.150568 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19be5184-7f14-41b5-88a9-bd6f83eecde5" 
containerName="nova-kuttl-api-api" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.150576 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="19be5184-7f14-41b5-88a9-bd6f83eecde5" containerName="nova-kuttl-api-api" Jan 23 08:50:33 crc kubenswrapper[4711]: E0123 08:50:33.150593 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc11dc78-b7be-4509-818f-0d4cb6c97931" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.150601 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc11dc78-b7be-4509-818f-0d4cb6c97931" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:50:33 crc kubenswrapper[4711]: E0123 08:50:33.150616 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdc81782-4865-40e9-82c2-39a2e65fa1e2" containerName="nova-kuttl-metadata-log" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.150623 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdc81782-4865-40e9-82c2-39a2e65fa1e2" containerName="nova-kuttl-metadata-log" Jan 23 08:50:33 crc kubenswrapper[4711]: E0123 08:50:33.150642 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.150650 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:50:33 crc kubenswrapper[4711]: E0123 08:50:33.150659 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fbd9125-a9d2-4477-a6a9-57a49ac330fb" containerName="nova-kuttl-api-log" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.150667 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fbd9125-a9d2-4477-a6a9-57a49ac330fb" containerName="nova-kuttl-api-log" Jan 23 08:50:33 crc kubenswrapper[4711]: E0123 08:50:33.150678 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03f2241c-8262-4dfc-9425-8c48fc2ab7e3" containerName="nova-kuttl-metadata-log" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.150686 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="03f2241c-8262-4dfc-9425-8c48fc2ab7e3" containerName="nova-kuttl-metadata-log" Jan 23 08:50:33 crc kubenswrapper[4711]: E0123 08:50:33.150699 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13a3efe4-fb91-4f25-b266-84cef3bd94b1" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.150708 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="13a3efe4-fb91-4f25-b266-84cef3bd94b1" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:50:33 crc kubenswrapper[4711]: E0123 08:50:33.150729 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcab875b-6101-462d-a763-1aa2441eecd6" containerName="nova-kuttl-api-api" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.150771 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcab875b-6101-462d-a763-1aa2441eecd6" containerName="nova-kuttl-api-api" Jan 23 08:50:33 crc kubenswrapper[4711]: E0123 08:50:33.150782 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03f2241c-8262-4dfc-9425-8c48fc2ab7e3" containerName="nova-kuttl-metadata-metadata" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.150790 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="03f2241c-8262-4dfc-9425-8c48fc2ab7e3" 
containerName="nova-kuttl-metadata-metadata" Jan 23 08:50:33 crc kubenswrapper[4711]: E0123 08:50:33.150808 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7695748-8afe-456b-bffe-7908726a6ca1" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.150816 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7695748-8afe-456b-bffe-7908726a6ca1" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.151095 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7695748-8afe-456b-bffe-7908726a6ca1" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.151114 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc11dc78-b7be-4509-818f-0d4cb6c97931" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.151124 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdc81782-4865-40e9-82c2-39a2e65fa1e2" containerName="nova-kuttl-metadata-log" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.151136 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="718a4201-2f3d-4471-aecf-f3724cc7ce00" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.151144 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="9287d315-4783-49d4-92a0-730d372a9a58" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.151153 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="a32c17dc-a3f1-4b49-8b73-ad7ab6af3caf" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.151163 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fbd9125-a9d2-4477-a6a9-57a49ac330fb" containerName="nova-kuttl-api-log" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.151173 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcab875b-6101-462d-a763-1aa2441eecd6" containerName="nova-kuttl-api-api" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.151183 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="03f2241c-8262-4dfc-9425-8c48fc2ab7e3" containerName="nova-kuttl-metadata-metadata" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.151194 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcab875b-6101-462d-a763-1aa2441eecd6" containerName="nova-kuttl-api-log" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.151203 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="19be5184-7f14-41b5-88a9-bd6f83eecde5" containerName="nova-kuttl-api-log" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.151234 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="19be5184-7f14-41b5-88a9-bd6f83eecde5" containerName="nova-kuttl-api-api" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.151241 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="13a3efe4-fb91-4f25-b266-84cef3bd94b1" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.151253 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fbd9125-a9d2-4477-a6a9-57a49ac330fb" containerName="nova-kuttl-api-api" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.151265 4711 
Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.151277 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdc81782-4865-40e9-82c2-39a2e65fa1e2" containerName="nova-kuttl-metadata-metadata"
Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.151289 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="03f2241c-8262-4dfc-9425-8c48fc2ab7e3" containerName="nova-kuttl-metadata-log"
Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.177750 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/novaapic035-account-delete-prh8m"]
Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.177874 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novaapic035-account-delete-prh8m"
Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.204111 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/novacell01f13-account-delete-r82p7"]
Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.217133 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novacell01f13-account-delete-r82p7"
Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.261060 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh"]
Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.283379 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"]
Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.283607 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" podUID="25110455-b1b8-4c71-ba91-667073ffc1fa" containerName="nova-kuttl-cell1-novncproxy-novncproxy" containerID="cri-o://328adb3acdaa5d43f27cdee12f97e8a5b4e75490b7ace520bf4af4f7ff02e4c9" gracePeriod=30
Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.310262 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7d8fj\" (UniqueName: \"kubernetes.io/projected/f4b261e5-d405-4859-a886-45d3cd526820-kube-api-access-7d8fj\") pod \"novacell01f13-account-delete-r82p7\" (UID: \"f4b261e5-d405-4859-a886-45d3cd526820\") " pod="nova-kuttl-default/novacell01f13-account-delete-r82p7"
Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.310375 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4b261e5-d405-4859-a886-45d3cd526820-operator-scripts\") pod \"novacell01f13-account-delete-r82p7\" (UID: \"f4b261e5-d405-4859-a886-45d3cd526820\") " pod="nova-kuttl-default/novacell01f13-account-delete-r82p7"
Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.310411 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chnq2\" (UniqueName: \"kubernetes.io/projected/66658e7c-e0d1-4064-851f-54ef8bcd7395-kube-api-access-chnq2\") pod \"novaapic035-account-delete-prh8m\" (UID: \"66658e7c-e0d1-4064-851f-54ef8bcd7395\") " pod="nova-kuttl-default/novaapic035-account-delete-prh8m"
Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.310456 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66658e7c-e0d1-4064-851f-54ef8bcd7395-operator-scripts\") pod \"novaapic035-account-delete-prh8m\" (UID: \"66658e7c-e0d1-4064-851f-54ef8bcd7395\") " pod="nova-kuttl-default/novaapic035-account-delete-prh8m"
volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66658e7c-e0d1-4064-851f-54ef8bcd7395-operator-scripts\") pod \"novaapic035-account-delete-prh8m\" (UID: \"66658e7c-e0d1-4064-851f-54ef8bcd7395\") " pod="nova-kuttl-default/novaapic035-account-delete-prh8m" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.329677 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-kmnlh"] Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.345381 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/novacell1ae24-account-delete-k5f27"] Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.346552 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novacell1ae24-account-delete-k5f27" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.393372 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/novacell01f13-account-delete-r82p7"] Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.401003 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/novacell1ae24-account-delete-k5f27"] Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.412018 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7d8fj\" (UniqueName: \"kubernetes.io/projected/f4b261e5-d405-4859-a886-45d3cd526820-kube-api-access-7d8fj\") pod \"novacell01f13-account-delete-r82p7\" (UID: \"f4b261e5-d405-4859-a886-45d3cd526820\") " pod="nova-kuttl-default/novacell01f13-account-delete-r82p7" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.412082 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1be189d-3629-4b24-8422-d80be213bb50-operator-scripts\") pod \"novacell1ae24-account-delete-k5f27\" (UID: \"c1be189d-3629-4b24-8422-d80be213bb50\") " pod="nova-kuttl-default/novacell1ae24-account-delete-k5f27" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.412146 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4b261e5-d405-4859-a886-45d3cd526820-operator-scripts\") pod \"novacell01f13-account-delete-r82p7\" (UID: \"f4b261e5-d405-4859-a886-45d3cd526820\") " pod="nova-kuttl-default/novacell01f13-account-delete-r82p7" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.412184 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chnq2\" (UniqueName: \"kubernetes.io/projected/66658e7c-e0d1-4064-851f-54ef8bcd7395-kube-api-access-chnq2\") pod \"novaapic035-account-delete-prh8m\" (UID: \"66658e7c-e0d1-4064-851f-54ef8bcd7395\") " pod="nova-kuttl-default/novaapic035-account-delete-prh8m" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.412227 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66658e7c-e0d1-4064-851f-54ef8bcd7395-operator-scripts\") pod \"novaapic035-account-delete-prh8m\" (UID: \"66658e7c-e0d1-4064-851f-54ef8bcd7395\") " pod="nova-kuttl-default/novaapic035-account-delete-prh8m" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.412266 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-px426\" (UniqueName: 
\"kubernetes.io/projected/c1be189d-3629-4b24-8422-d80be213bb50-kube-api-access-px426\") pod \"novacell1ae24-account-delete-k5f27\" (UID: \"c1be189d-3629-4b24-8422-d80be213bb50\") " pod="nova-kuttl-default/novacell1ae24-account-delete-k5f27" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.413279 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66658e7c-e0d1-4064-851f-54ef8bcd7395-operator-scripts\") pod \"novaapic035-account-delete-prh8m\" (UID: \"66658e7c-e0d1-4064-851f-54ef8bcd7395\") " pod="nova-kuttl-default/novaapic035-account-delete-prh8m" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.413469 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4b261e5-d405-4859-a886-45d3cd526820-operator-scripts\") pod \"novacell01f13-account-delete-r82p7\" (UID: \"f4b261e5-d405-4859-a886-45d3cd526820\") " pod="nova-kuttl-default/novacell01f13-account-delete-r82p7" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.431257 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7d8fj\" (UniqueName: \"kubernetes.io/projected/f4b261e5-d405-4859-a886-45d3cd526820-kube-api-access-7d8fj\") pod \"novacell01f13-account-delete-r82p7\" (UID: \"f4b261e5-d405-4859-a886-45d3cd526820\") " pod="nova-kuttl-default/novacell01f13-account-delete-r82p7" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.432365 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chnq2\" (UniqueName: \"kubernetes.io/projected/66658e7c-e0d1-4064-851f-54ef8bcd7395-kube-api-access-chnq2\") pod \"novaapic035-account-delete-prh8m\" (UID: \"66658e7c-e0d1-4064-851f-54ef8bcd7395\") " pod="nova-kuttl-default/novaapic035-account-delete-prh8m" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.488333 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5269eca1-6667-4df6-8c89-4fafab283186" path="/var/lib/kubelet/pods/5269eca1-6667-4df6-8c89-4fafab283186/volumes" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.489702 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b4b00a4-3b27-43b4-9d01-bc25c89a1176" path="/var/lib/kubelet/pods/5b4b00a4-3b27-43b4-9d01-bc25c89a1176/volumes" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.490534 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83abb416-d991-4a35-a771-a54da2c53e0c" path="/var/lib/kubelet/pods/83abb416-d991-4a35-a771-a54da2c53e0c/volumes" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.491850 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bcab875b-6101-462d-a763-1aa2441eecd6" path="/var/lib/kubelet/pods/bcab875b-6101-462d-a763-1aa2441eecd6/volumes" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.493471 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee376276-2fe0-47ce-9002-eae63b9efea2" path="/var/lib/kubelet/pods/ee376276-2fe0-47ce-9002-eae63b9efea2/volumes" Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.506685 4711 util.go:30] "No sandbox for pod can be found. 
Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.513604 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-px426\" (UniqueName: \"kubernetes.io/projected/c1be189d-3629-4b24-8422-d80be213bb50-kube-api-access-px426\") pod \"novacell1ae24-account-delete-k5f27\" (UID: \"c1be189d-3629-4b24-8422-d80be213bb50\") " pod="nova-kuttl-default/novacell1ae24-account-delete-k5f27"
Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.513726 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1be189d-3629-4b24-8422-d80be213bb50-operator-scripts\") pod \"novacell1ae24-account-delete-k5f27\" (UID: \"c1be189d-3629-4b24-8422-d80be213bb50\") " pod="nova-kuttl-default/novacell1ae24-account-delete-k5f27"
Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.514633 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1be189d-3629-4b24-8422-d80be213bb50-operator-scripts\") pod \"novacell1ae24-account-delete-k5f27\" (UID: \"c1be189d-3629-4b24-8422-d80be213bb50\") " pod="nova-kuttl-default/novacell1ae24-account-delete-k5f27"
Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.534547 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-px426\" (UniqueName: \"kubernetes.io/projected/c1be189d-3629-4b24-8422-d80be213bb50-kube-api-access-px426\") pod \"novacell1ae24-account-delete-k5f27\" (UID: \"c1be189d-3629-4b24-8422-d80be213bb50\") " pod="nova-kuttl-default/novacell1ae24-account-delete-k5f27"
Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.560024 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novacell01f13-account-delete-r82p7"
Jan 23 08:50:33 crc kubenswrapper[4711]: I0123 08:50:33.676828 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novacell1ae24-account-delete-k5f27"
Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.014565 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/novaapic035-account-delete-prh8m"]
Jan 23 08:50:34 crc kubenswrapper[4711]: W0123 08:50:34.033352 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66658e7c_e0d1_4064_851f_54ef8bcd7395.slice/crio-49781341063be4f5e5c813c64d97ef0962542ef57e877536ab2e57b62dc7a744 WatchSource:0}: Error finding container 49781341063be4f5e5c813c64d97ef0962542ef57e877536ab2e57b62dc7a744: Status 404 returned error can't find the container with id 49781341063be4f5e5c813c64d97ef0962542ef57e877536ab2e57b62dc7a744
Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.118022 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0"
Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.190982 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/novacell01f13-account-delete-r82p7"]
Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.191216 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd86e220-6b63-49cd-bd8c-58a22c39ba68-config-data\") pod \"fd86e220-6b63-49cd-bd8c-58a22c39ba68\" (UID: \"fd86e220-6b63-49cd-bd8c-58a22c39ba68\") "
Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.191356 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8f9v\" (UniqueName: \"kubernetes.io/projected/fd86e220-6b63-49cd-bd8c-58a22c39ba68-kube-api-access-t8f9v\") pod \"fd86e220-6b63-49cd-bd8c-58a22c39ba68\" (UID: \"fd86e220-6b63-49cd-bd8c-58a22c39ba68\") "
Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.196621 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd86e220-6b63-49cd-bd8c-58a22c39ba68-kube-api-access-t8f9v" (OuterVolumeSpecName: "kube-api-access-t8f9v") pod "fd86e220-6b63-49cd-bd8c-58a22c39ba68" (UID: "fd86e220-6b63-49cd-bd8c-58a22c39ba68"). InnerVolumeSpecName "kube-api-access-t8f9v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.220725 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd86e220-6b63-49cd-bd8c-58a22c39ba68-config-data" (OuterVolumeSpecName: "config-data") pod "fd86e220-6b63-49cd-bd8c-58a22c39ba68" (UID: "fd86e220-6b63-49cd-bd8c-58a22c39ba68"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.295772 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd86e220-6b63-49cd-bd8c-58a22c39ba68-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.296535 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8f9v\" (UniqueName: \"kubernetes.io/projected/fd86e220-6b63-49cd-bd8c-58a22c39ba68-kube-api-access-t8f9v\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.314288 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/novacell1ae24-account-delete-k5f27"] Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.451770 4711 generic.go:334] "Generic (PLEG): container finished" podID="25110455-b1b8-4c71-ba91-667073ffc1fa" containerID="328adb3acdaa5d43f27cdee12f97e8a5b4e75490b7ace520bf4af4f7ff02e4c9" exitCode=0 Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.451871 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" event={"ID":"25110455-b1b8-4c71-ba91-667073ffc1fa","Type":"ContainerDied","Data":"328adb3acdaa5d43f27cdee12f97e8a5b4e75490b7ace520bf4af4f7ff02e4c9"} Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.451969 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" event={"ID":"25110455-b1b8-4c71-ba91-667073ffc1fa","Type":"ContainerDied","Data":"424d4b4a3531d00d848fd6ed8acf8ca6830c57a89f9a48d7c9572feb5d08b9dd"} Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.452047 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="424d4b4a3531d00d848fd6ed8acf8ca6830c57a89f9a48d7c9572feb5d08b9dd" Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.453058 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novaapic035-account-delete-prh8m" event={"ID":"66658e7c-e0d1-4064-851f-54ef8bcd7395","Type":"ContainerStarted","Data":"49781341063be4f5e5c813c64d97ef0962542ef57e877536ab2e57b62dc7a744"} Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.453949 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novacell1ae24-account-delete-k5f27" event={"ID":"c1be189d-3629-4b24-8422-d80be213bb50","Type":"ContainerStarted","Data":"5e4bfec1f5ed0914021dac2790e893c75e9ede3b1e2ca4ee5e72b8d113a5e7e8"} Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.455487 4711 generic.go:334] "Generic (PLEG): container finished" podID="fd86e220-6b63-49cd-bd8c-58a22c39ba68" containerID="eeebacbb91ec4f572f65d66727699439fcf5389db8187a13a3a909c9c4f56f17" exitCode=0 Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.455557 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" event={"ID":"fd86e220-6b63-49cd-bd8c-58a22c39ba68","Type":"ContainerDied","Data":"eeebacbb91ec4f572f65d66727699439fcf5389db8187a13a3a909c9c4f56f17"} Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.455582 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" event={"ID":"fd86e220-6b63-49cd-bd8c-58a22c39ba68","Type":"ContainerDied","Data":"e30045e02ec5be8c3cc75118be47b9250acf8e96c43a9536aae950a928364038"} Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.455602 4711 scope.go:117] "RemoveContainer" 
containerID="eeebacbb91ec4f572f65d66727699439fcf5389db8187a13a3a909c9c4f56f17" Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.455765 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.469947 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novacell01f13-account-delete-r82p7" event={"ID":"f4b261e5-d405-4859-a886-45d3cd526820","Type":"ContainerStarted","Data":"c37b0e7263fc11c4744ab0adbdc341d95625ba00b3f180478c3a0b8fc8e49c66"} Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.525615 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.556689 4711 scope.go:117] "RemoveContainer" containerID="eeebacbb91ec4f572f65d66727699439fcf5389db8187a13a3a909c9c4f56f17" Jan 23 08:50:34 crc kubenswrapper[4711]: E0123 08:50:34.560592 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eeebacbb91ec4f572f65d66727699439fcf5389db8187a13a3a909c9c4f56f17\": container with ID starting with eeebacbb91ec4f572f65d66727699439fcf5389db8187a13a3a909c9c4f56f17 not found: ID does not exist" containerID="eeebacbb91ec4f572f65d66727699439fcf5389db8187a13a3a909c9c4f56f17" Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.560628 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eeebacbb91ec4f572f65d66727699439fcf5389db8187a13a3a909c9c4f56f17"} err="failed to get container status \"eeebacbb91ec4f572f65d66727699439fcf5389db8187a13a3a909c9c4f56f17\": rpc error: code = NotFound desc = could not find container \"eeebacbb91ec4f572f65d66727699439fcf5389db8187a13a3a909c9c4f56f17\": container with ID starting with eeebacbb91ec4f572f65d66727699439fcf5389db8187a13a3a909c9c4f56f17 not found: ID does not exist" Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.560647 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.572872 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.605059 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nlzgl\" (UniqueName: \"kubernetes.io/projected/25110455-b1b8-4c71-ba91-667073ffc1fa-kube-api-access-nlzgl\") pod \"25110455-b1b8-4c71-ba91-667073ffc1fa\" (UID: \"25110455-b1b8-4c71-ba91-667073ffc1fa\") " Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.605119 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25110455-b1b8-4c71-ba91-667073ffc1fa-config-data\") pod \"25110455-b1b8-4c71-ba91-667073ffc1fa\" (UID: \"25110455-b1b8-4c71-ba91-667073ffc1fa\") " Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.611773 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25110455-b1b8-4c71-ba91-667073ffc1fa-kube-api-access-nlzgl" (OuterVolumeSpecName: "kube-api-access-nlzgl") pod "25110455-b1b8-4c71-ba91-667073ffc1fa" (UID: "25110455-b1b8-4c71-ba91-667073ffc1fa"). InnerVolumeSpecName "kube-api-access-nlzgl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.636140 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25110455-b1b8-4c71-ba91-667073ffc1fa-config-data" (OuterVolumeSpecName: "config-data") pod "25110455-b1b8-4c71-ba91-667073ffc1fa" (UID: "25110455-b1b8-4c71-ba91-667073ffc1fa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.707277 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nlzgl\" (UniqueName: \"kubernetes.io/projected/25110455-b1b8-4c71-ba91-667073ffc1fa-kube-api-access-nlzgl\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:34 crc kubenswrapper[4711]: I0123 08:50:34.707352 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25110455-b1b8-4c71-ba91-667073ffc1fa-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.315037 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="74b5783d-fd94-4ad1-b7d6-23a1c223b37d" containerName="nova-kuttl-metadata-metadata" probeResult="failure" output="Get \"http://10.217.0.170:8775/\": read tcp 10.217.0.2:47766->10.217.0.170:8775: read: connection reset by peer" Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.315122 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="74b5783d-fd94-4ad1-b7d6-23a1c223b37d" containerName="nova-kuttl-metadata-log" probeResult="failure" output="Get \"http://10.217.0.170:8775/\": read tcp 10.217.0.2:47774->10.217.0.170:8775: read: connection reset by peer" Jan 23 08:50:35 crc kubenswrapper[4711]: E0123 08:50:35.491976 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2f77028668c79eb7edaba655236b213ccb507a10e33d6615b337f124769ca974" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:50:35 crc kubenswrapper[4711]: E0123 08:50:35.495768 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2f77028668c79eb7edaba655236b213ccb507a10e33d6615b337f124769ca974" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.496611 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd86e220-6b63-49cd-bd8c-58a22c39ba68" path="/var/lib/kubelet/pods/fd86e220-6b63-49cd-bd8c-58a22c39ba68/volumes" Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.496763 4711 generic.go:334] "Generic (PLEG): container finished" podID="c1be189d-3629-4b24-8422-d80be213bb50" containerID="fea0ea333621314e8420200032d19561a341c6c861e4c0491d449bc245c0d205" exitCode=0 Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.497469 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novacell1ae24-account-delete-k5f27" event={"ID":"c1be189d-3629-4b24-8422-d80be213bb50","Type":"ContainerDied","Data":"fea0ea333621314e8420200032d19561a341c6c861e4c0491d449bc245c0d205"} Jan 23 08:50:35 crc kubenswrapper[4711]: E0123 08:50:35.497674 4711 log.go:32] "ExecSync cmd from runtime service 
failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2f77028668c79eb7edaba655236b213ccb507a10e33d6615b337f124769ca974" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:50:35 crc kubenswrapper[4711]: E0123 08:50:35.498375 4711 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podUID="1e26ab32-cb9b-45fc-856c-f58e6742bb74" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.505616 4711 generic.go:334] "Generic (PLEG): container finished" podID="f4b261e5-d405-4859-a886-45d3cd526820" containerID="24524cd01531660f7627b1a911fe4db6cf5ff2f95a98b4dbbf0b748ad7d02881" exitCode=0 Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.505705 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novacell01f13-account-delete-r82p7" event={"ID":"f4b261e5-d405-4859-a886-45d3cd526820","Type":"ContainerDied","Data":"24524cd01531660f7627b1a911fe4db6cf5ff2f95a98b4dbbf0b748ad7d02881"} Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.508425 4711 generic.go:334] "Generic (PLEG): container finished" podID="74b5783d-fd94-4ad1-b7d6-23a1c223b37d" containerID="b5fbe5564ccf547d4add01ced66a73b11f6c83486f8866a46a68cd990b10f895" exitCode=0 Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.508516 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"74b5783d-fd94-4ad1-b7d6-23a1c223b37d","Type":"ContainerDied","Data":"b5fbe5564ccf547d4add01ced66a73b11f6c83486f8866a46a68cd990b10f895"} Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.518614 4711 generic.go:334] "Generic (PLEG): container finished" podID="66658e7c-e0d1-4064-851f-54ef8bcd7395" containerID="132dc4a43ff0db0ada9c91fe753f2d6ea8feb1e71e7e83749302512c0aed2088" exitCode=0 Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.518688 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novaapic035-account-delete-prh8m" event={"ID":"66658e7c-e0d1-4064-851f-54ef8bcd7395","Type":"ContainerDied","Data":"132dc4a43ff0db0ada9c91fe753f2d6ea8feb1e71e7e83749302512c0aed2088"} Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.518729 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.572169 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"] Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.590809 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"] Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.819871 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.931560 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74b5783d-fd94-4ad1-b7d6-23a1c223b37d-logs\") pod \"74b5783d-fd94-4ad1-b7d6-23a1c223b37d\" (UID: \"74b5783d-fd94-4ad1-b7d6-23a1c223b37d\") " Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.931732 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74b5783d-fd94-4ad1-b7d6-23a1c223b37d-config-data\") pod \"74b5783d-fd94-4ad1-b7d6-23a1c223b37d\" (UID: \"74b5783d-fd94-4ad1-b7d6-23a1c223b37d\") " Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.931877 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6kcqh\" (UniqueName: \"kubernetes.io/projected/74b5783d-fd94-4ad1-b7d6-23a1c223b37d-kube-api-access-6kcqh\") pod \"74b5783d-fd94-4ad1-b7d6-23a1c223b37d\" (UID: \"74b5783d-fd94-4ad1-b7d6-23a1c223b37d\") " Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.932144 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74b5783d-fd94-4ad1-b7d6-23a1c223b37d-logs" (OuterVolumeSpecName: "logs") pod "74b5783d-fd94-4ad1-b7d6-23a1c223b37d" (UID: "74b5783d-fd94-4ad1-b7d6-23a1c223b37d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.933042 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74b5783d-fd94-4ad1-b7d6-23a1c223b37d-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.936925 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74b5783d-fd94-4ad1-b7d6-23a1c223b37d-kube-api-access-6kcqh" (OuterVolumeSpecName: "kube-api-access-6kcqh") pod "74b5783d-fd94-4ad1-b7d6-23a1c223b37d" (UID: "74b5783d-fd94-4ad1-b7d6-23a1c223b37d"). InnerVolumeSpecName "kube-api-access-6kcqh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:50:35 crc kubenswrapper[4711]: I0123 08:50:35.954841 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74b5783d-fd94-4ad1-b7d6-23a1c223b37d-config-data" (OuterVolumeSpecName: "config-data") pod "74b5783d-fd94-4ad1-b7d6-23a1c223b37d" (UID: "74b5783d-fd94-4ad1-b7d6-23a1c223b37d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.034174 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74b5783d-fd94-4ad1-b7d6-23a1c223b37d-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.034214 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6kcqh\" (UniqueName: \"kubernetes.io/projected/74b5783d-fd94-4ad1-b7d6-23a1c223b37d-kube-api-access-6kcqh\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.528144 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.528184 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"74b5783d-fd94-4ad1-b7d6-23a1c223b37d","Type":"ContainerDied","Data":"5222ae476c990190778f6045971e60139fa6a6e0973f802ac58a0cf38626e13d"} Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.528428 4711 scope.go:117] "RemoveContainer" containerID="b5fbe5564ccf547d4add01ced66a73b11f6c83486f8866a46a68cd990b10f895" Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.553722 4711 scope.go:117] "RemoveContainer" containerID="be6268be31b40071e19e801a106475caadd229d1e2192d7b53fed596688a3ff5" Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.559575 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.565148 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.893291 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novaapic035-account-delete-prh8m" Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.901806 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novacell1ae24-account-delete-k5f27" Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.911266 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novacell01f13-account-delete-r82p7" Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.948183 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-px426\" (UniqueName: \"kubernetes.io/projected/c1be189d-3629-4b24-8422-d80be213bb50-kube-api-access-px426\") pod \"c1be189d-3629-4b24-8422-d80be213bb50\" (UID: \"c1be189d-3629-4b24-8422-d80be213bb50\") " Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.948219 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7d8fj\" (UniqueName: \"kubernetes.io/projected/f4b261e5-d405-4859-a886-45d3cd526820-kube-api-access-7d8fj\") pod \"f4b261e5-d405-4859-a886-45d3cd526820\" (UID: \"f4b261e5-d405-4859-a886-45d3cd526820\") " Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.948263 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chnq2\" (UniqueName: \"kubernetes.io/projected/66658e7c-e0d1-4064-851f-54ef8bcd7395-kube-api-access-chnq2\") pod \"66658e7c-e0d1-4064-851f-54ef8bcd7395\" (UID: \"66658e7c-e0d1-4064-851f-54ef8bcd7395\") " Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.948307 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1be189d-3629-4b24-8422-d80be213bb50-operator-scripts\") pod \"c1be189d-3629-4b24-8422-d80be213bb50\" (UID: \"c1be189d-3629-4b24-8422-d80be213bb50\") " Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.948332 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4b261e5-d405-4859-a886-45d3cd526820-operator-scripts\") pod \"f4b261e5-d405-4859-a886-45d3cd526820\" (UID: \"f4b261e5-d405-4859-a886-45d3cd526820\") " Jan 23 08:50:36 crc kubenswrapper[4711]: 
I0123 08:50:36.948353 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66658e7c-e0d1-4064-851f-54ef8bcd7395-operator-scripts\") pod \"66658e7c-e0d1-4064-851f-54ef8bcd7395\" (UID: \"66658e7c-e0d1-4064-851f-54ef8bcd7395\") " Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.950323 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1be189d-3629-4b24-8422-d80be213bb50-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c1be189d-3629-4b24-8422-d80be213bb50" (UID: "c1be189d-3629-4b24-8422-d80be213bb50"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.950642 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66658e7c-e0d1-4064-851f-54ef8bcd7395-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "66658e7c-e0d1-4064-851f-54ef8bcd7395" (UID: "66658e7c-e0d1-4064-851f-54ef8bcd7395"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.950795 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4b261e5-d405-4859-a886-45d3cd526820-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f4b261e5-d405-4859-a886-45d3cd526820" (UID: "f4b261e5-d405-4859-a886-45d3cd526820"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.958407 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66658e7c-e0d1-4064-851f-54ef8bcd7395-kube-api-access-chnq2" (OuterVolumeSpecName: "kube-api-access-chnq2") pod "66658e7c-e0d1-4064-851f-54ef8bcd7395" (UID: "66658e7c-e0d1-4064-851f-54ef8bcd7395"). InnerVolumeSpecName "kube-api-access-chnq2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.969532 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4b261e5-d405-4859-a886-45d3cd526820-kube-api-access-7d8fj" (OuterVolumeSpecName: "kube-api-access-7d8fj") pod "f4b261e5-d405-4859-a886-45d3cd526820" (UID: "f4b261e5-d405-4859-a886-45d3cd526820"). InnerVolumeSpecName "kube-api-access-7d8fj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:50:36 crc kubenswrapper[4711]: I0123 08:50:36.971374 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1be189d-3629-4b24-8422-d80be213bb50-kube-api-access-px426" (OuterVolumeSpecName: "kube-api-access-px426") pod "c1be189d-3629-4b24-8422-d80be213bb50" (UID: "c1be189d-3629-4b24-8422-d80be213bb50"). InnerVolumeSpecName "kube-api-access-px426". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:50:37 crc kubenswrapper[4711]: I0123 08:50:37.050217 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-px426\" (UniqueName: \"kubernetes.io/projected/c1be189d-3629-4b24-8422-d80be213bb50-kube-api-access-px426\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:37 crc kubenswrapper[4711]: I0123 08:50:37.050246 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7d8fj\" (UniqueName: \"kubernetes.io/projected/f4b261e5-d405-4859-a886-45d3cd526820-kube-api-access-7d8fj\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:37 crc kubenswrapper[4711]: I0123 08:50:37.050256 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chnq2\" (UniqueName: \"kubernetes.io/projected/66658e7c-e0d1-4064-851f-54ef8bcd7395-kube-api-access-chnq2\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:37 crc kubenswrapper[4711]: I0123 08:50:37.050266 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1be189d-3629-4b24-8422-d80be213bb50-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:37 crc kubenswrapper[4711]: I0123 08:50:37.050274 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4b261e5-d405-4859-a886-45d3cd526820-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:37 crc kubenswrapper[4711]: I0123 08:50:37.050282 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66658e7c-e0d1-4064-851f-54ef8bcd7395-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:37 crc kubenswrapper[4711]: I0123 08:50:37.484281 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25110455-b1b8-4c71-ba91-667073ffc1fa" path="/var/lib/kubelet/pods/25110455-b1b8-4c71-ba91-667073ffc1fa/volumes" Jan 23 08:50:37 crc kubenswrapper[4711]: I0123 08:50:37.485490 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74b5783d-fd94-4ad1-b7d6-23a1c223b37d" path="/var/lib/kubelet/pods/74b5783d-fd94-4ad1-b7d6-23a1c223b37d/volumes" Jan 23 08:50:37 crc kubenswrapper[4711]: I0123 08:50:37.537692 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novacell1ae24-account-delete-k5f27" event={"ID":"c1be189d-3629-4b24-8422-d80be213bb50","Type":"ContainerDied","Data":"5e4bfec1f5ed0914021dac2790e893c75e9ede3b1e2ca4ee5e72b8d113a5e7e8"} Jan 23 08:50:37 crc kubenswrapper[4711]: I0123 08:50:37.537728 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e4bfec1f5ed0914021dac2790e893c75e9ede3b1e2ca4ee5e72b8d113a5e7e8" Jan 23 08:50:37 crc kubenswrapper[4711]: I0123 08:50:37.537748 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/novacell1ae24-account-delete-k5f27" Jan 23 08:50:37 crc kubenswrapper[4711]: I0123 08:50:37.539353 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novacell01f13-account-delete-r82p7" event={"ID":"f4b261e5-d405-4859-a886-45d3cd526820","Type":"ContainerDied","Data":"c37b0e7263fc11c4744ab0adbdc341d95625ba00b3f180478c3a0b8fc8e49c66"} Jan 23 08:50:37 crc kubenswrapper[4711]: I0123 08:50:37.539393 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c37b0e7263fc11c4744ab0adbdc341d95625ba00b3f180478c3a0b8fc8e49c66" Jan 23 08:50:37 crc kubenswrapper[4711]: I0123 08:50:37.539808 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novacell01f13-account-delete-r82p7" Jan 23 08:50:37 crc kubenswrapper[4711]: I0123 08:50:37.542022 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novaapic035-account-delete-prh8m" event={"ID":"66658e7c-e0d1-4064-851f-54ef8bcd7395","Type":"ContainerDied","Data":"49781341063be4f5e5c813c64d97ef0962542ef57e877536ab2e57b62dc7a744"} Jan 23 08:50:37 crc kubenswrapper[4711]: I0123 08:50:37.542051 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49781341063be4f5e5c813c64d97ef0962542ef57e877536ab2e57b62dc7a744" Jan 23 08:50:37 crc kubenswrapper[4711]: I0123 08:50:37.542085 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novaapic035-account-delete-prh8m" Jan 23 08:50:38 crc kubenswrapper[4711]: I0123 08:50:38.151428 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-api-db-create-j56tp"] Jan 23 08:50:38 crc kubenswrapper[4711]: I0123 08:50:38.158958 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-api-db-create-j56tp"] Jan 23 08:50:38 crc kubenswrapper[4711]: I0123 08:50:38.180297 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/novaapic035-account-delete-prh8m"] Jan 23 08:50:38 crc kubenswrapper[4711]: I0123 08:50:38.190868 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-api-c035-account-create-update-lb8vg"] Jan 23 08:50:38 crc kubenswrapper[4711]: I0123 08:50:38.200768 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/novaapic035-account-delete-prh8m"] Jan 23 08:50:38 crc kubenswrapper[4711]: I0123 08:50:38.206759 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-api-c035-account-create-update-lb8vg"] Jan 23 08:50:38 crc kubenswrapper[4711]: I0123 08:50:38.252286 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-cell0-db-create-rm9fn"] Jan 23 08:50:38 crc kubenswrapper[4711]: I0123 08:50:38.265019 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-cell0-db-create-rm9fn"] Jan 23 08:50:38 crc kubenswrapper[4711]: I0123 08:50:38.274175 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-cell0-1f13-account-create-update-6xgp4"] Jan 23 08:50:38 crc kubenswrapper[4711]: I0123 08:50:38.293477 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/novacell01f13-account-delete-r82p7"] Jan 23 08:50:38 crc kubenswrapper[4711]: I0123 08:50:38.300782 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/novacell01f13-account-delete-r82p7"] Jan 23 08:50:38 crc 
kubenswrapper[4711]: I0123 08:50:38.306904 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-cell0-1f13-account-create-update-6xgp4"] Jan 23 08:50:38 crc kubenswrapper[4711]: I0123 08:50:38.360774 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-cell1-db-create-vhvsz"] Jan 23 08:50:38 crc kubenswrapper[4711]: I0123 08:50:38.368899 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-cell1-db-create-vhvsz"] Jan 23 08:50:38 crc kubenswrapper[4711]: I0123 08:50:38.376818 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/novacell1ae24-account-delete-k5f27"] Jan 23 08:50:38 crc kubenswrapper[4711]: I0123 08:50:38.384094 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/novacell1ae24-account-delete-k5f27"] Jan 23 08:50:38 crc kubenswrapper[4711]: I0123 08:50:38.391426 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-cell1-ae24-account-create-update-2sg59"] Jan 23 08:50:38 crc kubenswrapper[4711]: I0123 08:50:38.398129 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-cell1-ae24-account-create-update-2sg59"] Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.486552 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00a709f3-ebe2-4c67-bf82-13e14b658eb3" path="/var/lib/kubelet/pods/00a709f3-ebe2-4c67-bf82-13e14b658eb3/volumes" Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.488796 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23189694-c571-4400-b89b-084ce0ebc613" path="/var/lib/kubelet/pods/23189694-c571-4400-b89b-084ce0ebc613/volumes" Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.489465 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66658e7c-e0d1-4064-851f-54ef8bcd7395" path="/var/lib/kubelet/pods/66658e7c-e0d1-4064-851f-54ef8bcd7395/volumes" Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.490046 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="900647f1-76d3-4aef-be40-87ed4482b0c6" path="/var/lib/kubelet/pods/900647f1-76d3-4aef-be40-87ed4482b0c6/volumes" Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.491020 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9fa2633-02dc-4fff-9308-d70167fd430e" path="/var/lib/kubelet/pods/a9fa2633-02dc-4fff-9308-d70167fd430e/volumes" Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.491475 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1be189d-3629-4b24-8422-d80be213bb50" path="/var/lib/kubelet/pods/c1be189d-3629-4b24-8422-d80be213bb50/volumes" Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.492008 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e24aad2a-3579-4da8-b755-6e6edee47096" path="/var/lib/kubelet/pods/e24aad2a-3579-4da8-b755-6e6edee47096/volumes" Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.493245 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f09a9eb4-8481-496b-96d3-dbd5ac4e2172" path="/var/lib/kubelet/pods/f09a9eb4-8481-496b-96d3-dbd5ac4e2172/volumes" Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.493845 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b261e5-d405-4859-a886-45d3cd526820" path="/var/lib/kubelet/pods/f4b261e5-d405-4859-a886-45d3cd526820/volumes" Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 
08:50:39.534996 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.561308 4711 generic.go:334] "Generic (PLEG): container finished" podID="1e26ab32-cb9b-45fc-856c-f58e6742bb74" containerID="2f77028668c79eb7edaba655236b213ccb507a10e33d6615b337f124769ca974" exitCode=0 Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.561361 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"1e26ab32-cb9b-45fc-856c-f58e6742bb74","Type":"ContainerDied","Data":"2f77028668c79eb7edaba655236b213ccb507a10e33d6615b337f124769ca974"} Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.561391 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"1e26ab32-cb9b-45fc-856c-f58e6742bb74","Type":"ContainerDied","Data":"2b81a247267a5b3fc6dc23bdd302462ce39c3636722229e33a10c7dd3281a222"} Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.561414 4711 scope.go:117] "RemoveContainer" containerID="2f77028668c79eb7edaba655236b213ccb507a10e33d6615b337f124769ca974" Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.561576 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.591533 4711 scope.go:117] "RemoveContainer" containerID="2f77028668c79eb7edaba655236b213ccb507a10e33d6615b337f124769ca974" Jan 23 08:50:39 crc kubenswrapper[4711]: E0123 08:50:39.592090 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f77028668c79eb7edaba655236b213ccb507a10e33d6615b337f124769ca974\": container with ID starting with 2f77028668c79eb7edaba655236b213ccb507a10e33d6615b337f124769ca974 not found: ID does not exist" containerID="2f77028668c79eb7edaba655236b213ccb507a10e33d6615b337f124769ca974" Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.592124 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f77028668c79eb7edaba655236b213ccb507a10e33d6615b337f124769ca974"} err="failed to get container status \"2f77028668c79eb7edaba655236b213ccb507a10e33d6615b337f124769ca974\": rpc error: code = NotFound desc = could not find container \"2f77028668c79eb7edaba655236b213ccb507a10e33d6615b337f124769ca974\": container with ID starting with 2f77028668c79eb7edaba655236b213ccb507a10e33d6615b337f124769ca974 not found: ID does not exist" Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.690706 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ghxxl\" (UniqueName: \"kubernetes.io/projected/1e26ab32-cb9b-45fc-856c-f58e6742bb74-kube-api-access-ghxxl\") pod \"1e26ab32-cb9b-45fc-856c-f58e6742bb74\" (UID: \"1e26ab32-cb9b-45fc-856c-f58e6742bb74\") " Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.690887 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e26ab32-cb9b-45fc-856c-f58e6742bb74-config-data\") pod \"1e26ab32-cb9b-45fc-856c-f58e6742bb74\" (UID: \"1e26ab32-cb9b-45fc-856c-f58e6742bb74\") " Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.705318 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/1e26ab32-cb9b-45fc-856c-f58e6742bb74-kube-api-access-ghxxl" (OuterVolumeSpecName: "kube-api-access-ghxxl") pod "1e26ab32-cb9b-45fc-856c-f58e6742bb74" (UID: "1e26ab32-cb9b-45fc-856c-f58e6742bb74"). InnerVolumeSpecName "kube-api-access-ghxxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.721269 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e26ab32-cb9b-45fc-856c-f58e6742bb74-config-data" (OuterVolumeSpecName: "config-data") pod "1e26ab32-cb9b-45fc-856c-f58e6742bb74" (UID: "1e26ab32-cb9b-45fc-856c-f58e6742bb74"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.792836 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e26ab32-cb9b-45fc-856c-f58e6742bb74-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.792903 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ghxxl\" (UniqueName: \"kubernetes.io/projected/1e26ab32-cb9b-45fc-856c-f58e6742bb74-kube-api-access-ghxxl\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.944568 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:50:39 crc kubenswrapper[4711]: I0123 08:50:39.958182 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.491651 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-api-db-create-xsmh6"] Jan 23 08:50:40 crc kubenswrapper[4711]: E0123 08:50:40.492361 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74b5783d-fd94-4ad1-b7d6-23a1c223b37d" containerName="nova-kuttl-metadata-metadata" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.492378 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="74b5783d-fd94-4ad1-b7d6-23a1c223b37d" containerName="nova-kuttl-metadata-metadata" Jan 23 08:50:40 crc kubenswrapper[4711]: E0123 08:50:40.492392 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74b5783d-fd94-4ad1-b7d6-23a1c223b37d" containerName="nova-kuttl-metadata-log" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.492399 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="74b5783d-fd94-4ad1-b7d6-23a1c223b37d" containerName="nova-kuttl-metadata-log" Jan 23 08:50:40 crc kubenswrapper[4711]: E0123 08:50:40.492417 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25110455-b1b8-4c71-ba91-667073ffc1fa" containerName="nova-kuttl-cell1-novncproxy-novncproxy" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.492425 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="25110455-b1b8-4c71-ba91-667073ffc1fa" containerName="nova-kuttl-cell1-novncproxy-novncproxy" Jan 23 08:50:40 crc kubenswrapper[4711]: E0123 08:50:40.492441 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e26ab32-cb9b-45fc-856c-f58e6742bb74" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.492448 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e26ab32-cb9b-45fc-856c-f58e6742bb74" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:50:40 crc kubenswrapper[4711]: E0123 
08:50:40.492461 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd86e220-6b63-49cd-bd8c-58a22c39ba68" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.492468 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd86e220-6b63-49cd-bd8c-58a22c39ba68" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:50:40 crc kubenswrapper[4711]: E0123 08:50:40.492483 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1be189d-3629-4b24-8422-d80be213bb50" containerName="mariadb-account-delete" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.492492 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1be189d-3629-4b24-8422-d80be213bb50" containerName="mariadb-account-delete" Jan 23 08:50:40 crc kubenswrapper[4711]: E0123 08:50:40.492527 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b261e5-d405-4859-a886-45d3cd526820" containerName="mariadb-account-delete" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.492535 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b261e5-d405-4859-a886-45d3cd526820" containerName="mariadb-account-delete" Jan 23 08:50:40 crc kubenswrapper[4711]: E0123 08:50:40.492562 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66658e7c-e0d1-4064-851f-54ef8bcd7395" containerName="mariadb-account-delete" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.492569 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="66658e7c-e0d1-4064-851f-54ef8bcd7395" containerName="mariadb-account-delete" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.492728 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="66658e7c-e0d1-4064-851f-54ef8bcd7395" containerName="mariadb-account-delete" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.492742 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="74b5783d-fd94-4ad1-b7d6-23a1c223b37d" containerName="nova-kuttl-metadata-metadata" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.492750 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b261e5-d405-4859-a886-45d3cd526820" containerName="mariadb-account-delete" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.492757 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e26ab32-cb9b-45fc-856c-f58e6742bb74" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.492768 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd86e220-6b63-49cd-bd8c-58a22c39ba68" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.492777 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="74b5783d-fd94-4ad1-b7d6-23a1c223b37d" containerName="nova-kuttl-metadata-log" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.492787 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1be189d-3629-4b24-8422-d80be213bb50" containerName="mariadb-account-delete" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.492797 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="25110455-b1b8-4c71-ba91-667073ffc1fa" containerName="nova-kuttl-cell1-novncproxy-novncproxy" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.493268 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-api-db-create-xsmh6" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.501053 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-api-db-create-xsmh6"] Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.503252 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7c00bbe-ae45-418c-8c02-d375fc28602f-operator-scripts\") pod \"nova-api-db-create-xsmh6\" (UID: \"c7c00bbe-ae45-418c-8c02-d375fc28602f\") " pod="nova-kuttl-default/nova-api-db-create-xsmh6" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.503338 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lgvk\" (UniqueName: \"kubernetes.io/projected/c7c00bbe-ae45-418c-8c02-d375fc28602f-kube-api-access-2lgvk\") pod \"nova-api-db-create-xsmh6\" (UID: \"c7c00bbe-ae45-418c-8c02-d375fc28602f\") " pod="nova-kuttl-default/nova-api-db-create-xsmh6" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.590532 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-cell0-db-create-l6c9k"] Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.591702 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-db-create-l6c9k" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.604338 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lgvk\" (UniqueName: \"kubernetes.io/projected/c7c00bbe-ae45-418c-8c02-d375fc28602f-kube-api-access-2lgvk\") pod \"nova-api-db-create-xsmh6\" (UID: \"c7c00bbe-ae45-418c-8c02-d375fc28602f\") " pod="nova-kuttl-default/nova-api-db-create-xsmh6" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.604416 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0cab7ffb-b293-4bc0-9334-670b79f37e44-operator-scripts\") pod \"nova-cell0-db-create-l6c9k\" (UID: \"0cab7ffb-b293-4bc0-9334-670b79f37e44\") " pod="nova-kuttl-default/nova-cell0-db-create-l6c9k" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.604448 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nd29\" (UniqueName: \"kubernetes.io/projected/0cab7ffb-b293-4bc0-9334-670b79f37e44-kube-api-access-9nd29\") pod \"nova-cell0-db-create-l6c9k\" (UID: \"0cab7ffb-b293-4bc0-9334-670b79f37e44\") " pod="nova-kuttl-default/nova-cell0-db-create-l6c9k" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.604337 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell0-db-create-l6c9k"] Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.604477 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7c00bbe-ae45-418c-8c02-d375fc28602f-operator-scripts\") pod \"nova-api-db-create-xsmh6\" (UID: \"c7c00bbe-ae45-418c-8c02-d375fc28602f\") " pod="nova-kuttl-default/nova-api-db-create-xsmh6" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.605348 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7c00bbe-ae45-418c-8c02-d375fc28602f-operator-scripts\") pod \"nova-api-db-create-xsmh6\" (UID: 
\"c7c00bbe-ae45-418c-8c02-d375fc28602f\") " pod="nova-kuttl-default/nova-api-db-create-xsmh6" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.629526 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lgvk\" (UniqueName: \"kubernetes.io/projected/c7c00bbe-ae45-418c-8c02-d375fc28602f-kube-api-access-2lgvk\") pod \"nova-api-db-create-xsmh6\" (UID: \"c7c00bbe-ae45-418c-8c02-d375fc28602f\") " pod="nova-kuttl-default/nova-api-db-create-xsmh6" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.693490 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-api-1145-account-create-update-pjbbf"] Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.694656 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-api-1145-account-create-update-pjbbf" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.696820 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-api-db-secret" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.706598 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/65cf67a7-9978-446e-a442-4d5aca0072cb-operator-scripts\") pod \"nova-api-1145-account-create-update-pjbbf\" (UID: \"65cf67a7-9978-446e-a442-4d5aca0072cb\") " pod="nova-kuttl-default/nova-api-1145-account-create-update-pjbbf" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.706867 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcj6x\" (UniqueName: \"kubernetes.io/projected/65cf67a7-9978-446e-a442-4d5aca0072cb-kube-api-access-wcj6x\") pod \"nova-api-1145-account-create-update-pjbbf\" (UID: \"65cf67a7-9978-446e-a442-4d5aca0072cb\") " pod="nova-kuttl-default/nova-api-1145-account-create-update-pjbbf" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.707021 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0cab7ffb-b293-4bc0-9334-670b79f37e44-operator-scripts\") pod \"nova-cell0-db-create-l6c9k\" (UID: \"0cab7ffb-b293-4bc0-9334-670b79f37e44\") " pod="nova-kuttl-default/nova-cell0-db-create-l6c9k" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.707124 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nd29\" (UniqueName: \"kubernetes.io/projected/0cab7ffb-b293-4bc0-9334-670b79f37e44-kube-api-access-9nd29\") pod \"nova-cell0-db-create-l6c9k\" (UID: \"0cab7ffb-b293-4bc0-9334-670b79f37e44\") " pod="nova-kuttl-default/nova-cell0-db-create-l6c9k" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.708001 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0cab7ffb-b293-4bc0-9334-670b79f37e44-operator-scripts\") pod \"nova-cell0-db-create-l6c9k\" (UID: \"0cab7ffb-b293-4bc0-9334-670b79f37e44\") " pod="nova-kuttl-default/nova-cell0-db-create-l6c9k" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.711406 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-api-1145-account-create-update-pjbbf"] Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.738463 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nd29\" (UniqueName: 
\"kubernetes.io/projected/0cab7ffb-b293-4bc0-9334-670b79f37e44-kube-api-access-9nd29\") pod \"nova-cell0-db-create-l6c9k\" (UID: \"0cab7ffb-b293-4bc0-9334-670b79f37e44\") " pod="nova-kuttl-default/nova-cell0-db-create-l6c9k" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.791813 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-cell1-db-create-bshss"] Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.792969 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-db-create-bshss" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.815540 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/65cf67a7-9978-446e-a442-4d5aca0072cb-operator-scripts\") pod \"nova-api-1145-account-create-update-pjbbf\" (UID: \"65cf67a7-9978-446e-a442-4d5aca0072cb\") " pod="nova-kuttl-default/nova-api-1145-account-create-update-pjbbf" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.815637 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcj6x\" (UniqueName: \"kubernetes.io/projected/65cf67a7-9978-446e-a442-4d5aca0072cb-kube-api-access-wcj6x\") pod \"nova-api-1145-account-create-update-pjbbf\" (UID: \"65cf67a7-9978-446e-a442-4d5aca0072cb\") " pod="nova-kuttl-default/nova-api-1145-account-create-update-pjbbf" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.817015 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/65cf67a7-9978-446e-a442-4d5aca0072cb-operator-scripts\") pod \"nova-api-1145-account-create-update-pjbbf\" (UID: \"65cf67a7-9978-446e-a442-4d5aca0072cb\") " pod="nova-kuttl-default/nova-api-1145-account-create-update-pjbbf" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.820702 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-api-db-create-xsmh6" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.821984 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell1-db-create-bshss"] Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.859878 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcj6x\" (UniqueName: \"kubernetes.io/projected/65cf67a7-9978-446e-a442-4d5aca0072cb-kube-api-access-wcj6x\") pod \"nova-api-1145-account-create-update-pjbbf\" (UID: \"65cf67a7-9978-446e-a442-4d5aca0072cb\") " pod="nova-kuttl-default/nova-api-1145-account-create-update-pjbbf" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.902697 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-cell0-c1fc-account-create-update-ddw8f"] Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.904041 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-c1fc-account-create-update-ddw8f" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.905737 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-cell0-db-create-l6c9k" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.913836 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-cell0-db-secret" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.913900 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell0-c1fc-account-create-update-ddw8f"] Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.930599 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tc49\" (UniqueName: \"kubernetes.io/projected/fdf0b61a-9832-45f5-8e29-6d63dd239381-kube-api-access-5tc49\") pod \"nova-cell1-db-create-bshss\" (UID: \"fdf0b61a-9832-45f5-8e29-6d63dd239381\") " pod="nova-kuttl-default/nova-cell1-db-create-bshss" Jan 23 08:50:40 crc kubenswrapper[4711]: I0123 08:50:40.930707 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdf0b61a-9832-45f5-8e29-6d63dd239381-operator-scripts\") pod \"nova-cell1-db-create-bshss\" (UID: \"fdf0b61a-9832-45f5-8e29-6d63dd239381\") " pod="nova-kuttl-default/nova-cell1-db-create-bshss" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.011163 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-api-1145-account-create-update-pjbbf" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.032769 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdf0b61a-9832-45f5-8e29-6d63dd239381-operator-scripts\") pod \"nova-cell1-db-create-bshss\" (UID: \"fdf0b61a-9832-45f5-8e29-6d63dd239381\") " pod="nova-kuttl-default/nova-cell1-db-create-bshss" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.032870 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0adb282a-52af-4813-970c-207e19ea7350-operator-scripts\") pod \"nova-cell0-c1fc-account-create-update-ddw8f\" (UID: \"0adb282a-52af-4813-970c-207e19ea7350\") " pod="nova-kuttl-default/nova-cell0-c1fc-account-create-update-ddw8f" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.032907 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tc49\" (UniqueName: \"kubernetes.io/projected/fdf0b61a-9832-45f5-8e29-6d63dd239381-kube-api-access-5tc49\") pod \"nova-cell1-db-create-bshss\" (UID: \"fdf0b61a-9832-45f5-8e29-6d63dd239381\") " pod="nova-kuttl-default/nova-cell1-db-create-bshss" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.032941 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktc67\" (UniqueName: \"kubernetes.io/projected/0adb282a-52af-4813-970c-207e19ea7350-kube-api-access-ktc67\") pod \"nova-cell0-c1fc-account-create-update-ddw8f\" (UID: \"0adb282a-52af-4813-970c-207e19ea7350\") " pod="nova-kuttl-default/nova-cell0-c1fc-account-create-update-ddw8f" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.034647 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdf0b61a-9832-45f5-8e29-6d63dd239381-operator-scripts\") pod \"nova-cell1-db-create-bshss\" (UID: \"fdf0b61a-9832-45f5-8e29-6d63dd239381\") " 
pod="nova-kuttl-default/nova-cell1-db-create-bshss" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.051959 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tc49\" (UniqueName: \"kubernetes.io/projected/fdf0b61a-9832-45f5-8e29-6d63dd239381-kube-api-access-5tc49\") pod \"nova-cell1-db-create-bshss\" (UID: \"fdf0b61a-9832-45f5-8e29-6d63dd239381\") " pod="nova-kuttl-default/nova-cell1-db-create-bshss" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.105266 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-cell1-a2a4-account-create-update-sf2bt"] Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.106420 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-a2a4-account-create-update-sf2bt" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.111207 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-cell1-db-secret" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.117469 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell1-a2a4-account-create-update-sf2bt"] Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.135210 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-db-create-bshss" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.139710 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0adb282a-52af-4813-970c-207e19ea7350-operator-scripts\") pod \"nova-cell0-c1fc-account-create-update-ddw8f\" (UID: \"0adb282a-52af-4813-970c-207e19ea7350\") " pod="nova-kuttl-default/nova-cell0-c1fc-account-create-update-ddw8f" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.139823 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktc67\" (UniqueName: \"kubernetes.io/projected/0adb282a-52af-4813-970c-207e19ea7350-kube-api-access-ktc67\") pod \"nova-cell0-c1fc-account-create-update-ddw8f\" (UID: \"0adb282a-52af-4813-970c-207e19ea7350\") " pod="nova-kuttl-default/nova-cell0-c1fc-account-create-update-ddw8f" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.144217 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0adb282a-52af-4813-970c-207e19ea7350-operator-scripts\") pod \"nova-cell0-c1fc-account-create-update-ddw8f\" (UID: \"0adb282a-52af-4813-970c-207e19ea7350\") " pod="nova-kuttl-default/nova-cell0-c1fc-account-create-update-ddw8f" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.165168 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktc67\" (UniqueName: \"kubernetes.io/projected/0adb282a-52af-4813-970c-207e19ea7350-kube-api-access-ktc67\") pod \"nova-cell0-c1fc-account-create-update-ddw8f\" (UID: \"0adb282a-52af-4813-970c-207e19ea7350\") " pod="nova-kuttl-default/nova-cell0-c1fc-account-create-update-ddw8f" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.244745 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0edf7ac9-2f2a-408b-ab29-a7adf16656ba-operator-scripts\") pod \"nova-cell1-a2a4-account-create-update-sf2bt\" (UID: \"0edf7ac9-2f2a-408b-ab29-a7adf16656ba\") " 
pod="nova-kuttl-default/nova-cell1-a2a4-account-create-update-sf2bt" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.244816 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrgbn\" (UniqueName: \"kubernetes.io/projected/0edf7ac9-2f2a-408b-ab29-a7adf16656ba-kube-api-access-qrgbn\") pod \"nova-cell1-a2a4-account-create-update-sf2bt\" (UID: \"0edf7ac9-2f2a-408b-ab29-a7adf16656ba\") " pod="nova-kuttl-default/nova-cell1-a2a4-account-create-update-sf2bt" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.247317 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-c1fc-account-create-update-ddw8f" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.338434 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-api-db-create-xsmh6"] Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.346836 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0edf7ac9-2f2a-408b-ab29-a7adf16656ba-operator-scripts\") pod \"nova-cell1-a2a4-account-create-update-sf2bt\" (UID: \"0edf7ac9-2f2a-408b-ab29-a7adf16656ba\") " pod="nova-kuttl-default/nova-cell1-a2a4-account-create-update-sf2bt" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.346943 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrgbn\" (UniqueName: \"kubernetes.io/projected/0edf7ac9-2f2a-408b-ab29-a7adf16656ba-kube-api-access-qrgbn\") pod \"nova-cell1-a2a4-account-create-update-sf2bt\" (UID: \"0edf7ac9-2f2a-408b-ab29-a7adf16656ba\") " pod="nova-kuttl-default/nova-cell1-a2a4-account-create-update-sf2bt" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.349303 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0edf7ac9-2f2a-408b-ab29-a7adf16656ba-operator-scripts\") pod \"nova-cell1-a2a4-account-create-update-sf2bt\" (UID: \"0edf7ac9-2f2a-408b-ab29-a7adf16656ba\") " pod="nova-kuttl-default/nova-cell1-a2a4-account-create-update-sf2bt" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.367566 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrgbn\" (UniqueName: \"kubernetes.io/projected/0edf7ac9-2f2a-408b-ab29-a7adf16656ba-kube-api-access-qrgbn\") pod \"nova-cell1-a2a4-account-create-update-sf2bt\" (UID: \"0edf7ac9-2f2a-408b-ab29-a7adf16656ba\") " pod="nova-kuttl-default/nova-cell1-a2a4-account-create-update-sf2bt" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.431174 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-cell1-a2a4-account-create-update-sf2bt" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.450781 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell0-db-create-l6c9k"] Jan 23 08:50:41 crc kubenswrapper[4711]: W0123 08:50:41.459964 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0cab7ffb_b293_4bc0_9334_670b79f37e44.slice/crio-f3ee5997012b7a1bdd832a2e7fa1f8eb5f3ba25fa8946c68225c061168f7258d WatchSource:0}: Error finding container f3ee5997012b7a1bdd832a2e7fa1f8eb5f3ba25fa8946c68225c061168f7258d: Status 404 returned error can't find the container with id f3ee5997012b7a1bdd832a2e7fa1f8eb5f3ba25fa8946c68225c061168f7258d Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.476358 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:50:41 crc kubenswrapper[4711]: E0123 08:50:41.476759 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.489759 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e26ab32-cb9b-45fc-856c-f58e6742bb74" path="/var/lib/kubelet/pods/1e26ab32-cb9b-45fc-856c-f58e6742bb74/volumes" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.533308 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-api-1145-account-create-update-pjbbf"] Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.598450 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-1145-account-create-update-pjbbf" event={"ID":"65cf67a7-9978-446e-a442-4d5aca0072cb","Type":"ContainerStarted","Data":"9edbda145d03fb86446e96082d2df8ea35fbb50b8df3b2189a40209e16735e88"} Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.618040 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-db-create-xsmh6" event={"ID":"c7c00bbe-ae45-418c-8c02-d375fc28602f","Type":"ContainerStarted","Data":"bbc643566238ed209a4d4f3b85579b2657ee807144b1f4756f667fd094dcd8e7"} Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.618087 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-db-create-xsmh6" event={"ID":"c7c00bbe-ae45-418c-8c02-d375fc28602f","Type":"ContainerStarted","Data":"4629a066a85471ea66c02e6e358310405f3adcdc6d57f100a14ba3d36e16a9a0"} Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.620341 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-db-create-l6c9k" event={"ID":"0cab7ffb-b293-4bc0-9334-670b79f37e44","Type":"ContainerStarted","Data":"f3ee5997012b7a1bdd832a2e7fa1f8eb5f3ba25fa8946c68225c061168f7258d"} Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.637038 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-api-db-create-xsmh6" podStartSLOduration=1.637018573 podStartE2EDuration="1.637018573s" podCreationTimestamp="2026-01-23 08:50:40 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:50:41.634238914 +0000 UTC m=+1827.207195292" watchObservedRunningTime="2026-01-23 08:50:41.637018573 +0000 UTC m=+1827.209974941" Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.660050 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell1-db-create-bshss"] Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.733332 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell0-c1fc-account-create-update-ddw8f"] Jan 23 08:50:41 crc kubenswrapper[4711]: W0123 08:50:41.742700 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0adb282a_52af_4813_970c_207e19ea7350.slice/crio-245b55a723b82bed31cd09e710e066724f5305e21951e950f15a097fd2997b08 WatchSource:0}: Error finding container 245b55a723b82bed31cd09e710e066724f5305e21951e950f15a097fd2997b08: Status 404 returned error can't find the container with id 245b55a723b82bed31cd09e710e066724f5305e21951e950f15a097fd2997b08 Jan 23 08:50:41 crc kubenswrapper[4711]: I0123 08:50:41.909995 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell1-a2a4-account-create-update-sf2bt"] Jan 23 08:50:41 crc kubenswrapper[4711]: W0123 08:50:41.910306 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0edf7ac9_2f2a_408b_ab29_a7adf16656ba.slice/crio-49e947bd70fd0ef476c47c07e20f45fce1b12d0038eda7cb545bc787c7e90bc7 WatchSource:0}: Error finding container 49e947bd70fd0ef476c47c07e20f45fce1b12d0038eda7cb545bc787c7e90bc7: Status 404 returned error can't find the container with id 49e947bd70fd0ef476c47c07e20f45fce1b12d0038eda7cb545bc787c7e90bc7 Jan 23 08:50:42 crc kubenswrapper[4711]: I0123 08:50:42.631471 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-c1fc-account-create-update-ddw8f" event={"ID":"0adb282a-52af-4813-970c-207e19ea7350","Type":"ContainerStarted","Data":"1b9881e04c1afb44f8fb96c730e8e3c28cea7873e8d2e8ebba63850ad2c6e196"} Jan 23 08:50:42 crc kubenswrapper[4711]: I0123 08:50:42.631622 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-c1fc-account-create-update-ddw8f" event={"ID":"0adb282a-52af-4813-970c-207e19ea7350","Type":"ContainerStarted","Data":"245b55a723b82bed31cd09e710e066724f5305e21951e950f15a097fd2997b08"} Jan 23 08:50:42 crc kubenswrapper[4711]: I0123 08:50:42.634672 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-a2a4-account-create-update-sf2bt" event={"ID":"0edf7ac9-2f2a-408b-ab29-a7adf16656ba","Type":"ContainerStarted","Data":"b5751fa7c1a99d51f8231ae4a46c96dc07890862caaa454c056e82d748719d2c"} Jan 23 08:50:42 crc kubenswrapper[4711]: I0123 08:50:42.634710 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-a2a4-account-create-update-sf2bt" event={"ID":"0edf7ac9-2f2a-408b-ab29-a7adf16656ba","Type":"ContainerStarted","Data":"49e947bd70fd0ef476c47c07e20f45fce1b12d0038eda7cb545bc787c7e90bc7"} Jan 23 08:50:42 crc kubenswrapper[4711]: I0123 08:50:42.637229 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-db-create-l6c9k" event={"ID":"0cab7ffb-b293-4bc0-9334-670b79f37e44","Type":"ContainerStarted","Data":"eab49d4e3f0f5e8226ac520556eac418d99a675651681e9df655b1941e49bd9f"} Jan 23 
08:50:42 crc kubenswrapper[4711]: I0123 08:50:42.641096 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-1145-account-create-update-pjbbf" event={"ID":"65cf67a7-9978-446e-a442-4d5aca0072cb","Type":"ContainerStarted","Data":"7ac63bbf095552435a63a8a9ba9f83945143d4b9d4ade93f66b8344fc15ce540"} Jan 23 08:50:42 crc kubenswrapper[4711]: I0123 08:50:42.644266 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-db-create-bshss" event={"ID":"fdf0b61a-9832-45f5-8e29-6d63dd239381","Type":"ContainerStarted","Data":"5acdb3f73cbb6aebc6a3336c9a3529d7810dd980a5e9c497b46bf78dc0e031d3"} Jan 23 08:50:42 crc kubenswrapper[4711]: I0123 08:50:42.644326 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-db-create-bshss" event={"ID":"fdf0b61a-9832-45f5-8e29-6d63dd239381","Type":"ContainerStarted","Data":"0e89f14c9015661442bf14c4247ed83aa14be65b0bb8d71be0cf6bda842c5544"} Jan 23 08:50:42 crc kubenswrapper[4711]: I0123 08:50:42.647029 4711 generic.go:334] "Generic (PLEG): container finished" podID="c7c00bbe-ae45-418c-8c02-d375fc28602f" containerID="bbc643566238ed209a4d4f3b85579b2657ee807144b1f4756f667fd094dcd8e7" exitCode=0 Jan 23 08:50:42 crc kubenswrapper[4711]: I0123 08:50:42.647071 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-db-create-xsmh6" event={"ID":"c7c00bbe-ae45-418c-8c02-d375fc28602f","Type":"ContainerDied","Data":"bbc643566238ed209a4d4f3b85579b2657ee807144b1f4756f667fd094dcd8e7"} Jan 23 08:50:42 crc kubenswrapper[4711]: I0123 08:50:42.663585 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-cell0-c1fc-account-create-update-ddw8f" podStartSLOduration=2.663562818 podStartE2EDuration="2.663562818s" podCreationTimestamp="2026-01-23 08:50:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:50:42.657067479 +0000 UTC m=+1828.230023857" watchObservedRunningTime="2026-01-23 08:50:42.663562818 +0000 UTC m=+1828.236519186" Jan 23 08:50:42 crc kubenswrapper[4711]: I0123 08:50:42.676243 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-api-1145-account-create-update-pjbbf" podStartSLOduration=2.676220308 podStartE2EDuration="2.676220308s" podCreationTimestamp="2026-01-23 08:50:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:50:42.67221679 +0000 UTC m=+1828.245173158" watchObservedRunningTime="2026-01-23 08:50:42.676220308 +0000 UTC m=+1828.249176676" Jan 23 08:50:42 crc kubenswrapper[4711]: I0123 08:50:42.715197 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-cell1-db-create-bshss" podStartSLOduration=2.715175453 podStartE2EDuration="2.715175453s" podCreationTimestamp="2026-01-23 08:50:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:50:42.694371763 +0000 UTC m=+1828.267328131" watchObservedRunningTime="2026-01-23 08:50:42.715175453 +0000 UTC m=+1828.288131821" Jan 23 08:50:42 crc kubenswrapper[4711]: I0123 08:50:42.719199 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-cell1-a2a4-account-create-update-sf2bt" podStartSLOduration=1.719188241 
podStartE2EDuration="1.719188241s" podCreationTimestamp="2026-01-23 08:50:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:50:42.712280162 +0000 UTC m=+1828.285236530" watchObservedRunningTime="2026-01-23 08:50:42.719188241 +0000 UTC m=+1828.292144609" Jan 23 08:50:42 crc kubenswrapper[4711]: I0123 08:50:42.737153 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-cell0-db-create-l6c9k" podStartSLOduration=2.737135151 podStartE2EDuration="2.737135151s" podCreationTimestamp="2026-01-23 08:50:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:50:42.730250832 +0000 UTC m=+1828.303207210" watchObservedRunningTime="2026-01-23 08:50:42.737135151 +0000 UTC m=+1828.310091519" Jan 23 08:50:43 crc kubenswrapper[4711]: I0123 08:50:43.035278 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/keystone-db-sync-57tdw"] Jan 23 08:50:43 crc kubenswrapper[4711]: I0123 08:50:43.043185 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/keystone-db-sync-57tdw"] Jan 23 08:50:43 crc kubenswrapper[4711]: I0123 08:50:43.482531 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a889480-e905-406c-a80d-a01ddebb3a4a" path="/var/lib/kubelet/pods/3a889480-e905-406c-a80d-a01ddebb3a4a/volumes" Jan 23 08:50:43 crc kubenswrapper[4711]: I0123 08:50:43.657885 4711 generic.go:334] "Generic (PLEG): container finished" podID="0cab7ffb-b293-4bc0-9334-670b79f37e44" containerID="eab49d4e3f0f5e8226ac520556eac418d99a675651681e9df655b1941e49bd9f" exitCode=0 Jan 23 08:50:43 crc kubenswrapper[4711]: I0123 08:50:43.658004 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-db-create-l6c9k" event={"ID":"0cab7ffb-b293-4bc0-9334-670b79f37e44","Type":"ContainerDied","Data":"eab49d4e3f0f5e8226ac520556eac418d99a675651681e9df655b1941e49bd9f"} Jan 23 08:50:43 crc kubenswrapper[4711]: I0123 08:50:43.996619 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-api-db-create-xsmh6" Jan 23 08:50:44 crc kubenswrapper[4711]: E0123 08:50:44.065082 4711 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfdf0b61a_9832_45f5_8e29_6d63dd239381.slice/crio-conmon-5acdb3f73cbb6aebc6a3336c9a3529d7810dd980a5e9c497b46bf78dc0e031d3.scope\": RecentStats: unable to find data in memory cache]" Jan 23 08:50:44 crc kubenswrapper[4711]: I0123 08:50:44.094519 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lgvk\" (UniqueName: \"kubernetes.io/projected/c7c00bbe-ae45-418c-8c02-d375fc28602f-kube-api-access-2lgvk\") pod \"c7c00bbe-ae45-418c-8c02-d375fc28602f\" (UID: \"c7c00bbe-ae45-418c-8c02-d375fc28602f\") " Jan 23 08:50:44 crc kubenswrapper[4711]: I0123 08:50:44.094581 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7c00bbe-ae45-418c-8c02-d375fc28602f-operator-scripts\") pod \"c7c00bbe-ae45-418c-8c02-d375fc28602f\" (UID: \"c7c00bbe-ae45-418c-8c02-d375fc28602f\") " Jan 23 08:50:44 crc kubenswrapper[4711]: I0123 08:50:44.095352 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7c00bbe-ae45-418c-8c02-d375fc28602f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c7c00bbe-ae45-418c-8c02-d375fc28602f" (UID: "c7c00bbe-ae45-418c-8c02-d375fc28602f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:50:44 crc kubenswrapper[4711]: I0123 08:50:44.100878 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7c00bbe-ae45-418c-8c02-d375fc28602f-kube-api-access-2lgvk" (OuterVolumeSpecName: "kube-api-access-2lgvk") pod "c7c00bbe-ae45-418c-8c02-d375fc28602f" (UID: "c7c00bbe-ae45-418c-8c02-d375fc28602f"). InnerVolumeSpecName "kube-api-access-2lgvk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:50:44 crc kubenswrapper[4711]: I0123 08:50:44.196820 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lgvk\" (UniqueName: \"kubernetes.io/projected/c7c00bbe-ae45-418c-8c02-d375fc28602f-kube-api-access-2lgvk\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:44 crc kubenswrapper[4711]: I0123 08:50:44.196862 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7c00bbe-ae45-418c-8c02-d375fc28602f-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:44 crc kubenswrapper[4711]: I0123 08:50:44.669092 4711 generic.go:334] "Generic (PLEG): container finished" podID="65cf67a7-9978-446e-a442-4d5aca0072cb" containerID="7ac63bbf095552435a63a8a9ba9f83945143d4b9d4ade93f66b8344fc15ce540" exitCode=0 Jan 23 08:50:44 crc kubenswrapper[4711]: I0123 08:50:44.669163 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-1145-account-create-update-pjbbf" event={"ID":"65cf67a7-9978-446e-a442-4d5aca0072cb","Type":"ContainerDied","Data":"7ac63bbf095552435a63a8a9ba9f83945143d4b9d4ade93f66b8344fc15ce540"} Jan 23 08:50:44 crc kubenswrapper[4711]: I0123 08:50:44.671614 4711 generic.go:334] "Generic (PLEG): container finished" podID="fdf0b61a-9832-45f5-8e29-6d63dd239381" containerID="5acdb3f73cbb6aebc6a3336c9a3529d7810dd980a5e9c497b46bf78dc0e031d3" exitCode=0 Jan 23 08:50:44 crc kubenswrapper[4711]: I0123 08:50:44.671670 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-db-create-bshss" event={"ID":"fdf0b61a-9832-45f5-8e29-6d63dd239381","Type":"ContainerDied","Data":"5acdb3f73cbb6aebc6a3336c9a3529d7810dd980a5e9c497b46bf78dc0e031d3"} Jan 23 08:50:44 crc kubenswrapper[4711]: I0123 08:50:44.674033 4711 generic.go:334] "Generic (PLEG): container finished" podID="0adb282a-52af-4813-970c-207e19ea7350" containerID="1b9881e04c1afb44f8fb96c730e8e3c28cea7873e8d2e8ebba63850ad2c6e196" exitCode=0 Jan 23 08:50:44 crc kubenswrapper[4711]: I0123 08:50:44.674127 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-c1fc-account-create-update-ddw8f" event={"ID":"0adb282a-52af-4813-970c-207e19ea7350","Type":"ContainerDied","Data":"1b9881e04c1afb44f8fb96c730e8e3c28cea7873e8d2e8ebba63850ad2c6e196"} Jan 23 08:50:44 crc kubenswrapper[4711]: I0123 08:50:44.676332 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-db-create-xsmh6" event={"ID":"c7c00bbe-ae45-418c-8c02-d375fc28602f","Type":"ContainerDied","Data":"4629a066a85471ea66c02e6e358310405f3adcdc6d57f100a14ba3d36e16a9a0"} Jan 23 08:50:44 crc kubenswrapper[4711]: I0123 08:50:44.676375 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4629a066a85471ea66c02e6e358310405f3adcdc6d57f100a14ba3d36e16a9a0" Jan 23 08:50:44 crc kubenswrapper[4711]: I0123 08:50:44.676404 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-api-db-create-xsmh6" Jan 23 08:50:44 crc kubenswrapper[4711]: I0123 08:50:44.679783 4711 generic.go:334] "Generic (PLEG): container finished" podID="0edf7ac9-2f2a-408b-ab29-a7adf16656ba" containerID="b5751fa7c1a99d51f8231ae4a46c96dc07890862caaa454c056e82d748719d2c" exitCode=0 Jan 23 08:50:44 crc kubenswrapper[4711]: I0123 08:50:44.680038 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-a2a4-account-create-update-sf2bt" event={"ID":"0edf7ac9-2f2a-408b-ab29-a7adf16656ba","Type":"ContainerDied","Data":"b5751fa7c1a99d51f8231ae4a46c96dc07890862caaa454c056e82d748719d2c"} Jan 23 08:50:45 crc kubenswrapper[4711]: I0123 08:50:45.060154 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-db-create-l6c9k" Jan 23 08:50:45 crc kubenswrapper[4711]: I0123 08:50:45.212903 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0cab7ffb-b293-4bc0-9334-670b79f37e44-operator-scripts\") pod \"0cab7ffb-b293-4bc0-9334-670b79f37e44\" (UID: \"0cab7ffb-b293-4bc0-9334-670b79f37e44\") " Jan 23 08:50:45 crc kubenswrapper[4711]: I0123 08:50:45.213057 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9nd29\" (UniqueName: \"kubernetes.io/projected/0cab7ffb-b293-4bc0-9334-670b79f37e44-kube-api-access-9nd29\") pod \"0cab7ffb-b293-4bc0-9334-670b79f37e44\" (UID: \"0cab7ffb-b293-4bc0-9334-670b79f37e44\") " Jan 23 08:50:45 crc kubenswrapper[4711]: I0123 08:50:45.213642 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0cab7ffb-b293-4bc0-9334-670b79f37e44-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0cab7ffb-b293-4bc0-9334-670b79f37e44" (UID: "0cab7ffb-b293-4bc0-9334-670b79f37e44"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:50:45 crc kubenswrapper[4711]: I0123 08:50:45.218097 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cab7ffb-b293-4bc0-9334-670b79f37e44-kube-api-access-9nd29" (OuterVolumeSpecName: "kube-api-access-9nd29") pod "0cab7ffb-b293-4bc0-9334-670b79f37e44" (UID: "0cab7ffb-b293-4bc0-9334-670b79f37e44"). InnerVolumeSpecName "kube-api-access-9nd29". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:50:45 crc kubenswrapper[4711]: I0123 08:50:45.314761 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9nd29\" (UniqueName: \"kubernetes.io/projected/0cab7ffb-b293-4bc0-9334-670b79f37e44-kube-api-access-9nd29\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:45 crc kubenswrapper[4711]: I0123 08:50:45.314815 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0cab7ffb-b293-4bc0-9334-670b79f37e44-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:45 crc kubenswrapper[4711]: I0123 08:50:45.692590 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-db-create-l6c9k" event={"ID":"0cab7ffb-b293-4bc0-9334-670b79f37e44","Type":"ContainerDied","Data":"f3ee5997012b7a1bdd832a2e7fa1f8eb5f3ba25fa8946c68225c061168f7258d"} Jan 23 08:50:45 crc kubenswrapper[4711]: I0123 08:50:45.692641 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3ee5997012b7a1bdd832a2e7fa1f8eb5f3ba25fa8946c68225c061168f7258d" Jan 23 08:50:45 crc kubenswrapper[4711]: I0123 08:50:45.692817 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-db-create-l6c9k" Jan 23 08:50:45 crc kubenswrapper[4711]: I0123 08:50:45.979318 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-api-1145-account-create-update-pjbbf" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.125487 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcj6x\" (UniqueName: \"kubernetes.io/projected/65cf67a7-9978-446e-a442-4d5aca0072cb-kube-api-access-wcj6x\") pod \"65cf67a7-9978-446e-a442-4d5aca0072cb\" (UID: \"65cf67a7-9978-446e-a442-4d5aca0072cb\") " Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.125571 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/65cf67a7-9978-446e-a442-4d5aca0072cb-operator-scripts\") pod \"65cf67a7-9978-446e-a442-4d5aca0072cb\" (UID: \"65cf67a7-9978-446e-a442-4d5aca0072cb\") " Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.126519 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65cf67a7-9978-446e-a442-4d5aca0072cb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "65cf67a7-9978-446e-a442-4d5aca0072cb" (UID: "65cf67a7-9978-446e-a442-4d5aca0072cb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.130469 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-db-create-bshss" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.132159 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65cf67a7-9978-446e-a442-4d5aca0072cb-kube-api-access-wcj6x" (OuterVolumeSpecName: "kube-api-access-wcj6x") pod "65cf67a7-9978-446e-a442-4d5aca0072cb" (UID: "65cf67a7-9978-446e-a442-4d5aca0072cb"). InnerVolumeSpecName "kube-api-access-wcj6x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.226080 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-cell1-a2a4-account-create-update-sf2bt" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.226768 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5tc49\" (UniqueName: \"kubernetes.io/projected/fdf0b61a-9832-45f5-8e29-6d63dd239381-kube-api-access-5tc49\") pod \"fdf0b61a-9832-45f5-8e29-6d63dd239381\" (UID: \"fdf0b61a-9832-45f5-8e29-6d63dd239381\") " Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.226861 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdf0b61a-9832-45f5-8e29-6d63dd239381-operator-scripts\") pod \"fdf0b61a-9832-45f5-8e29-6d63dd239381\" (UID: \"fdf0b61a-9832-45f5-8e29-6d63dd239381\") " Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.227179 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/65cf67a7-9978-446e-a442-4d5aca0072cb-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.227199 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcj6x\" (UniqueName: \"kubernetes.io/projected/65cf67a7-9978-446e-a442-4d5aca0072cb-kube-api-access-wcj6x\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.227858 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdf0b61a-9832-45f5-8e29-6d63dd239381-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fdf0b61a-9832-45f5-8e29-6d63dd239381" (UID: "fdf0b61a-9832-45f5-8e29-6d63dd239381"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.230368 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdf0b61a-9832-45f5-8e29-6d63dd239381-kube-api-access-5tc49" (OuterVolumeSpecName: "kube-api-access-5tc49") pod "fdf0b61a-9832-45f5-8e29-6d63dd239381" (UID: "fdf0b61a-9832-45f5-8e29-6d63dd239381"). InnerVolumeSpecName "kube-api-access-5tc49". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.231397 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-cell0-c1fc-account-create-update-ddw8f" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.328207 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrgbn\" (UniqueName: \"kubernetes.io/projected/0edf7ac9-2f2a-408b-ab29-a7adf16656ba-kube-api-access-qrgbn\") pod \"0edf7ac9-2f2a-408b-ab29-a7adf16656ba\" (UID: \"0edf7ac9-2f2a-408b-ab29-a7adf16656ba\") " Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.328386 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0edf7ac9-2f2a-408b-ab29-a7adf16656ba-operator-scripts\") pod \"0edf7ac9-2f2a-408b-ab29-a7adf16656ba\" (UID: \"0edf7ac9-2f2a-408b-ab29-a7adf16656ba\") " Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.328406 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0adb282a-52af-4813-970c-207e19ea7350-operator-scripts\") pod \"0adb282a-52af-4813-970c-207e19ea7350\" (UID: \"0adb282a-52af-4813-970c-207e19ea7350\") " Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.328453 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktc67\" (UniqueName: \"kubernetes.io/projected/0adb282a-52af-4813-970c-207e19ea7350-kube-api-access-ktc67\") pod \"0adb282a-52af-4813-970c-207e19ea7350\" (UID: \"0adb282a-52af-4813-970c-207e19ea7350\") " Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.328760 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5tc49\" (UniqueName: \"kubernetes.io/projected/fdf0b61a-9832-45f5-8e29-6d63dd239381-kube-api-access-5tc49\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.328780 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fdf0b61a-9832-45f5-8e29-6d63dd239381-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.329107 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0edf7ac9-2f2a-408b-ab29-a7adf16656ba-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0edf7ac9-2f2a-408b-ab29-a7adf16656ba" (UID: "0edf7ac9-2f2a-408b-ab29-a7adf16656ba"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.329349 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0adb282a-52af-4813-970c-207e19ea7350-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0adb282a-52af-4813-970c-207e19ea7350" (UID: "0adb282a-52af-4813-970c-207e19ea7350"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.331317 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0adb282a-52af-4813-970c-207e19ea7350-kube-api-access-ktc67" (OuterVolumeSpecName: "kube-api-access-ktc67") pod "0adb282a-52af-4813-970c-207e19ea7350" (UID: "0adb282a-52af-4813-970c-207e19ea7350"). InnerVolumeSpecName "kube-api-access-ktc67". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.331767 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0edf7ac9-2f2a-408b-ab29-a7adf16656ba-kube-api-access-qrgbn" (OuterVolumeSpecName: "kube-api-access-qrgbn") pod "0edf7ac9-2f2a-408b-ab29-a7adf16656ba" (UID: "0edf7ac9-2f2a-408b-ab29-a7adf16656ba"). InnerVolumeSpecName "kube-api-access-qrgbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.429938 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0edf7ac9-2f2a-408b-ab29-a7adf16656ba-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.429981 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0adb282a-52af-4813-970c-207e19ea7350-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.429993 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktc67\" (UniqueName: \"kubernetes.io/projected/0adb282a-52af-4813-970c-207e19ea7350-kube-api-access-ktc67\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.430008 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrgbn\" (UniqueName: \"kubernetes.io/projected/0edf7ac9-2f2a-408b-ab29-a7adf16656ba-kube-api-access-qrgbn\") on node \"crc\" DevicePath \"\"" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.702317 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-1145-account-create-update-pjbbf" event={"ID":"65cf67a7-9978-446e-a442-4d5aca0072cb","Type":"ContainerDied","Data":"9edbda145d03fb86446e96082d2df8ea35fbb50b8df3b2189a40209e16735e88"} Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.702727 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9edbda145d03fb86446e96082d2df8ea35fbb50b8df3b2189a40209e16735e88" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.702342 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-api-1145-account-create-update-pjbbf" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.704692 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-db-create-bshss" event={"ID":"fdf0b61a-9832-45f5-8e29-6d63dd239381","Type":"ContainerDied","Data":"0e89f14c9015661442bf14c4247ed83aa14be65b0bb8d71be0cf6bda842c5544"} Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.704722 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e89f14c9015661442bf14c4247ed83aa14be65b0bb8d71be0cf6bda842c5544" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.704733 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-cell1-db-create-bshss" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.706301 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-c1fc-account-create-update-ddw8f" event={"ID":"0adb282a-52af-4813-970c-207e19ea7350","Type":"ContainerDied","Data":"245b55a723b82bed31cd09e710e066724f5305e21951e950f15a097fd2997b08"} Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.706383 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="245b55a723b82bed31cd09e710e066724f5305e21951e950f15a097fd2997b08" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.706522 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-c1fc-account-create-update-ddw8f" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.707930 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-a2a4-account-create-update-sf2bt" event={"ID":"0edf7ac9-2f2a-408b-ab29-a7adf16656ba","Type":"ContainerDied","Data":"49e947bd70fd0ef476c47c07e20f45fce1b12d0038eda7cb545bc787c7e90bc7"} Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.707966 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49e947bd70fd0ef476c47c07e20f45fce1b12d0038eda7cb545bc787c7e90bc7" Jan 23 08:50:46 crc kubenswrapper[4711]: I0123 08:50:46.708017 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-a2a4-account-create-update-sf2bt" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.135457 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c"] Jan 23 08:50:51 crc kubenswrapper[4711]: E0123 08:50:51.136120 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65cf67a7-9978-446e-a442-4d5aca0072cb" containerName="mariadb-account-create-update" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.136135 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="65cf67a7-9978-446e-a442-4d5aca0072cb" containerName="mariadb-account-create-update" Jan 23 08:50:51 crc kubenswrapper[4711]: E0123 08:50:51.136151 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0edf7ac9-2f2a-408b-ab29-a7adf16656ba" containerName="mariadb-account-create-update" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.136157 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0edf7ac9-2f2a-408b-ab29-a7adf16656ba" containerName="mariadb-account-create-update" Jan 23 08:50:51 crc kubenswrapper[4711]: E0123 08:50:51.136167 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cab7ffb-b293-4bc0-9334-670b79f37e44" containerName="mariadb-database-create" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.136174 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cab7ffb-b293-4bc0-9334-670b79f37e44" containerName="mariadb-database-create" Jan 23 08:50:51 crc kubenswrapper[4711]: E0123 08:50:51.136180 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7c00bbe-ae45-418c-8c02-d375fc28602f" containerName="mariadb-database-create" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.136187 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7c00bbe-ae45-418c-8c02-d375fc28602f" containerName="mariadb-database-create" Jan 23 08:50:51 crc kubenswrapper[4711]: E0123 08:50:51.136206 4711 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0adb282a-52af-4813-970c-207e19ea7350" containerName="mariadb-account-create-update" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.136217 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0adb282a-52af-4813-970c-207e19ea7350" containerName="mariadb-account-create-update" Jan 23 08:50:51 crc kubenswrapper[4711]: E0123 08:50:51.136227 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdf0b61a-9832-45f5-8e29-6d63dd239381" containerName="mariadb-database-create" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.136233 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdf0b61a-9832-45f5-8e29-6d63dd239381" containerName="mariadb-database-create" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.136412 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="65cf67a7-9978-446e-a442-4d5aca0072cb" containerName="mariadb-account-create-update" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.136428 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cab7ffb-b293-4bc0-9334-670b79f37e44" containerName="mariadb-database-create" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.136448 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="0adb282a-52af-4813-970c-207e19ea7350" containerName="mariadb-account-create-update" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.136465 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="0edf7ac9-2f2a-408b-ab29-a7adf16656ba" containerName="mariadb-account-create-update" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.136478 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7c00bbe-ae45-418c-8c02-d375fc28602f" containerName="mariadb-database-create" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.136492 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdf0b61a-9832-45f5-8e29-6d63dd239381" containerName="mariadb-database-create" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.137168 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.139975 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-nova-kuttl-dockercfg-nksnh" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.140122 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-conductor-config-data" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.140444 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-conductor-scripts" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.150102 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c"] Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.205192 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34a63f0b-9154-4762-b7af-d018f5a7d69a-config-data\") pod \"nova-kuttl-cell0-conductor-db-sync-jcd9c\" (UID: \"34a63f0b-9154-4762-b7af-d018f5a7d69a\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.205431 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34a63f0b-9154-4762-b7af-d018f5a7d69a-scripts\") pod \"nova-kuttl-cell0-conductor-db-sync-jcd9c\" (UID: \"34a63f0b-9154-4762-b7af-d018f5a7d69a\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.205533 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksdgh\" (UniqueName: \"kubernetes.io/projected/34a63f0b-9154-4762-b7af-d018f5a7d69a-kube-api-access-ksdgh\") pod \"nova-kuttl-cell0-conductor-db-sync-jcd9c\" (UID: \"34a63f0b-9154-4762-b7af-d018f5a7d69a\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.307747 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34a63f0b-9154-4762-b7af-d018f5a7d69a-config-data\") pod \"nova-kuttl-cell0-conductor-db-sync-jcd9c\" (UID: \"34a63f0b-9154-4762-b7af-d018f5a7d69a\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.308049 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34a63f0b-9154-4762-b7af-d018f5a7d69a-scripts\") pod \"nova-kuttl-cell0-conductor-db-sync-jcd9c\" (UID: \"34a63f0b-9154-4762-b7af-d018f5a7d69a\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.308185 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksdgh\" (UniqueName: \"kubernetes.io/projected/34a63f0b-9154-4762-b7af-d018f5a7d69a-kube-api-access-ksdgh\") pod \"nova-kuttl-cell0-conductor-db-sync-jcd9c\" (UID: \"34a63f0b-9154-4762-b7af-d018f5a7d69a\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.315320 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/34a63f0b-9154-4762-b7af-d018f5a7d69a-config-data\") pod \"nova-kuttl-cell0-conductor-db-sync-jcd9c\" (UID: \"34a63f0b-9154-4762-b7af-d018f5a7d69a\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.318215 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34a63f0b-9154-4762-b7af-d018f5a7d69a-scripts\") pod \"nova-kuttl-cell0-conductor-db-sync-jcd9c\" (UID: \"34a63f0b-9154-4762-b7af-d018f5a7d69a\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.327462 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksdgh\" (UniqueName: \"kubernetes.io/projected/34a63f0b-9154-4762-b7af-d018f5a7d69a-kube-api-access-ksdgh\") pod \"nova-kuttl-cell0-conductor-db-sync-jcd9c\" (UID: \"34a63f0b-9154-4762-b7af-d018f5a7d69a\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.374006 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n"] Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.375449 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.383694 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-conductor-scripts" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.392908 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-conductor-config-data" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.394940 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n"] Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.409283 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnxzw\" (UniqueName: \"kubernetes.io/projected/2916172d-3d37-431e-9311-1846770d5c02-kube-api-access-mnxzw\") pod \"nova-kuttl-cell1-conductor-db-sync-lg52n\" (UID: \"2916172d-3d37-431e-9311-1846770d5c02\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.409628 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2916172d-3d37-431e-9311-1846770d5c02-scripts\") pod \"nova-kuttl-cell1-conductor-db-sync-lg52n\" (UID: \"2916172d-3d37-431e-9311-1846770d5c02\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.409771 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2916172d-3d37-431e-9311-1846770d5c02-config-data\") pod \"nova-kuttl-cell1-conductor-db-sync-lg52n\" (UID: \"2916172d-3d37-431e-9311-1846770d5c02\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.414726 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0"] Jan 23 08:50:51 
crc kubenswrapper[4711]: I0123 08:50:51.415855 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.426431 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-compute-fake1-compute-config-data" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.447594 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0"] Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.455890 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.508140 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"] Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.509688 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.510903 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2916172d-3d37-431e-9311-1846770d5c02-config-data\") pod \"nova-kuttl-cell1-conductor-db-sync-lg52n\" (UID: \"2916172d-3d37-431e-9311-1846770d5c02\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.510956 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d73adce3-6489-46cd-baca-6b518bfbe671-config-data\") pod \"nova-kuttl-cell1-compute-fake1-compute-0\" (UID: \"d73adce3-6489-46cd-baca-6b518bfbe671\") " pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.511121 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnxzw\" (UniqueName: \"kubernetes.io/projected/2916172d-3d37-431e-9311-1846770d5c02-kube-api-access-mnxzw\") pod \"nova-kuttl-cell1-conductor-db-sync-lg52n\" (UID: \"2916172d-3d37-431e-9311-1846770d5c02\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.511167 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksdld\" (UniqueName: \"kubernetes.io/projected/d73adce3-6489-46cd-baca-6b518bfbe671-kube-api-access-ksdld\") pod \"nova-kuttl-cell1-compute-fake1-compute-0\" (UID: \"d73adce3-6489-46cd-baca-6b518bfbe671\") " pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.511218 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2916172d-3d37-431e-9311-1846770d5c02-scripts\") pod \"nova-kuttl-cell1-conductor-db-sync-lg52n\" (UID: \"2916172d-3d37-431e-9311-1846770d5c02\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.512390 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-novncproxy-config-data" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.520709 4711 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2916172d-3d37-431e-9311-1846770d5c02-config-data\") pod \"nova-kuttl-cell1-conductor-db-sync-lg52n\" (UID: \"2916172d-3d37-431e-9311-1846770d5c02\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.526141 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"] Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.526434 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2916172d-3d37-431e-9311-1846770d5c02-scripts\") pod \"nova-kuttl-cell1-conductor-db-sync-lg52n\" (UID: \"2916172d-3d37-431e-9311-1846770d5c02\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.565184 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnxzw\" (UniqueName: \"kubernetes.io/projected/2916172d-3d37-431e-9311-1846770d5c02-kube-api-access-mnxzw\") pod \"nova-kuttl-cell1-conductor-db-sync-lg52n\" (UID: \"2916172d-3d37-431e-9311-1846770d5c02\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.612628 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgf8x\" (UniqueName: \"kubernetes.io/projected/3ecb36fd-83f9-430e-b61f-1a27d907d613-kube-api-access-lgf8x\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"3ecb36fd-83f9-430e-b61f-1a27d907d613\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.613050 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d73adce3-6489-46cd-baca-6b518bfbe671-config-data\") pod \"nova-kuttl-cell1-compute-fake1-compute-0\" (UID: \"d73adce3-6489-46cd-baca-6b518bfbe671\") " pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.613120 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ecb36fd-83f9-430e-b61f-1a27d907d613-config-data\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"3ecb36fd-83f9-430e-b61f-1a27d907d613\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.613183 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksdld\" (UniqueName: \"kubernetes.io/projected/d73adce3-6489-46cd-baca-6b518bfbe671-kube-api-access-ksdld\") pod \"nova-kuttl-cell1-compute-fake1-compute-0\" (UID: \"d73adce3-6489-46cd-baca-6b518bfbe671\") " pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.619980 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d73adce3-6489-46cd-baca-6b518bfbe671-config-data\") pod \"nova-kuttl-cell1-compute-fake1-compute-0\" (UID: \"d73adce3-6489-46cd-baca-6b518bfbe671\") " pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.630994 4711 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-ksdld\" (UniqueName: \"kubernetes.io/projected/d73adce3-6489-46cd-baca-6b518bfbe671-kube-api-access-ksdld\") pod \"nova-kuttl-cell1-compute-fake1-compute-0\" (UID: \"d73adce3-6489-46cd-baca-6b518bfbe671\") " pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.708565 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.714801 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgf8x\" (UniqueName: \"kubernetes.io/projected/3ecb36fd-83f9-430e-b61f-1a27d907d613-kube-api-access-lgf8x\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"3ecb36fd-83f9-430e-b61f-1a27d907d613\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.714906 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ecb36fd-83f9-430e-b61f-1a27d907d613-config-data\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"3ecb36fd-83f9-430e-b61f-1a27d907d613\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.719283 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ecb36fd-83f9-430e-b61f-1a27d907d613-config-data\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"3ecb36fd-83f9-430e-b61f-1a27d907d613\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.729936 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.737340 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgf8x\" (UniqueName: \"kubernetes.io/projected/3ecb36fd-83f9-430e-b61f-1a27d907d613-kube-api-access-lgf8x\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"3ecb36fd-83f9-430e-b61f-1a27d907d613\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.811428 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c"] Jan 23 08:50:51 crc kubenswrapper[4711]: I0123 08:50:51.894337 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:50:52 crc kubenswrapper[4711]: I0123 08:50:52.192211 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n"] Jan 23 08:50:52 crc kubenswrapper[4711]: W0123 08:50:52.193853 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2916172d_3d37_431e_9311_1846770d5c02.slice/crio-280233e72f59f14c0ce8657ed2179b05007934d1156d7088e821f1cbff5d408a WatchSource:0}: Error finding container 280233e72f59f14c0ce8657ed2179b05007934d1156d7088e821f1cbff5d408a: Status 404 returned error can't find the container with id 280233e72f59f14c0ce8657ed2179b05007934d1156d7088e821f1cbff5d408a Jan 23 08:50:52 crc kubenswrapper[4711]: I0123 08:50:52.279540 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0"] Jan 23 08:50:52 crc kubenswrapper[4711]: W0123 08:50:52.281303 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd73adce3_6489_46cd_baca_6b518bfbe671.slice/crio-5ce360d7848fb09fc249734c15ff05305e8808872463cb5dc43abe7986f111a8 WatchSource:0}: Error finding container 5ce360d7848fb09fc249734c15ff05305e8808872463cb5dc43abe7986f111a8: Status 404 returned error can't find the container with id 5ce360d7848fb09fc249734c15ff05305e8808872463cb5dc43abe7986f111a8 Jan 23 08:50:52 crc kubenswrapper[4711]: I0123 08:50:52.284280 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 23 08:50:52 crc kubenswrapper[4711]: I0123 08:50:52.369747 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"] Jan 23 08:50:52 crc kubenswrapper[4711]: W0123 08:50:52.373997 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ecb36fd_83f9_430e_b61f_1a27d907d613.slice/crio-0d9c25cb21fa3032b5b405a0aa8455575b34f14a50acff25ab92cecebd737ec7 WatchSource:0}: Error finding container 0d9c25cb21fa3032b5b405a0aa8455575b34f14a50acff25ab92cecebd737ec7: Status 404 returned error can't find the container with id 0d9c25cb21fa3032b5b405a0aa8455575b34f14a50acff25ab92cecebd737ec7 Jan 23 08:50:52 crc kubenswrapper[4711]: I0123 08:50:52.768398 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" event={"ID":"3ecb36fd-83f9-430e-b61f-1a27d907d613","Type":"ContainerStarted","Data":"eb42132e37a984656fedb31f5e43b92c179a833aa5115b59474c23bcfbeb3ce4"} Jan 23 08:50:52 crc kubenswrapper[4711]: I0123 08:50:52.768455 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" event={"ID":"3ecb36fd-83f9-430e-b61f-1a27d907d613","Type":"ContainerStarted","Data":"0d9c25cb21fa3032b5b405a0aa8455575b34f14a50acff25ab92cecebd737ec7"} Jan 23 08:50:52 crc kubenswrapper[4711]: I0123 08:50:52.769852 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" event={"ID":"d73adce3-6489-46cd-baca-6b518bfbe671","Type":"ContainerStarted","Data":"5ce360d7848fb09fc249734c15ff05305e8808872463cb5dc43abe7986f111a8"} Jan 23 08:50:52 crc kubenswrapper[4711]: I0123 08:50:52.771541 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c" event={"ID":"34a63f0b-9154-4762-b7af-d018f5a7d69a","Type":"ContainerStarted","Data":"64c0c38b4bd5bf4a3a16cf025f8cc0860572e829c7f231d05f68f05240fb936c"} Jan 23 08:50:52 crc kubenswrapper[4711]: I0123 08:50:52.771585 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c" event={"ID":"34a63f0b-9154-4762-b7af-d018f5a7d69a","Type":"ContainerStarted","Data":"c5b73be1898b293cc123ea6e321701daec76fb33c514174bc3d1cca44fc650cb"} Jan 23 08:50:52 crc kubenswrapper[4711]: I0123 08:50:52.773660 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n" event={"ID":"2916172d-3d37-431e-9311-1846770d5c02","Type":"ContainerStarted","Data":"ce121608a029f32c4a261f158c9adaae9d5f539921c67dd4bfa9695c380b671f"} Jan 23 08:50:52 crc kubenswrapper[4711]: I0123 08:50:52.773709 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n" event={"ID":"2916172d-3d37-431e-9311-1846770d5c02","Type":"ContainerStarted","Data":"280233e72f59f14c0ce8657ed2179b05007934d1156d7088e821f1cbff5d408a"} Jan 23 08:50:52 crc kubenswrapper[4711]: I0123 08:50:52.819814 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c" podStartSLOduration=1.819784923 podStartE2EDuration="1.819784923s" podCreationTimestamp="2026-01-23 08:50:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:50:52.79518289 +0000 UTC m=+1838.368139268" watchObservedRunningTime="2026-01-23 08:50:52.819784923 +0000 UTC m=+1838.392741291" Jan 23 08:50:52 crc kubenswrapper[4711]: I0123 08:50:52.822529 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n" podStartSLOduration=1.82250134 podStartE2EDuration="1.82250134s" podCreationTimestamp="2026-01-23 08:50:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:50:52.819929577 +0000 UTC m=+1838.392885955" watchObservedRunningTime="2026-01-23 08:50:52.82250134 +0000 UTC m=+1838.395457708" Jan 23 08:50:55 crc kubenswrapper[4711]: I0123 08:50:55.515655 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" podStartSLOduration=4.515626684 podStartE2EDuration="4.515626684s" podCreationTimestamp="2026-01-23 08:50:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:50:53.800523216 +0000 UTC m=+1839.373479604" watchObservedRunningTime="2026-01-23 08:50:55.515626684 +0000 UTC m=+1841.088583052" Jan 23 08:50:56 crc kubenswrapper[4711]: I0123 08:50:56.033602 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/placement-db-sync-qtrk2"] Jan 23 08:50:56 crc kubenswrapper[4711]: I0123 08:50:56.044518 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/placement-db-sync-qtrk2"] Jan 23 08:50:56 crc kubenswrapper[4711]: I0123 08:50:56.474990 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:50:56 crc kubenswrapper[4711]: E0123 
08:50:56.475314 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:50:56 crc kubenswrapper[4711]: I0123 08:50:56.895231 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:50:57 crc kubenswrapper[4711]: I0123 08:50:57.485445 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17c1d7d2-2f13-4a96-8b57-ef301be89606" path="/var/lib/kubelet/pods/17c1d7d2-2f13-4a96-8b57-ef301be89606/volumes" Jan 23 08:50:57 crc kubenswrapper[4711]: I0123 08:50:57.834891 4711 generic.go:334] "Generic (PLEG): container finished" podID="2916172d-3d37-431e-9311-1846770d5c02" containerID="ce121608a029f32c4a261f158c9adaae9d5f539921c67dd4bfa9695c380b671f" exitCode=0 Jan 23 08:50:57 crc kubenswrapper[4711]: I0123 08:50:57.835147 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n" event={"ID":"2916172d-3d37-431e-9311-1846770d5c02","Type":"ContainerDied","Data":"ce121608a029f32c4a261f158c9adaae9d5f539921c67dd4bfa9695c380b671f"} Jan 23 08:50:58 crc kubenswrapper[4711]: I0123 08:50:58.847930 4711 generic.go:334] "Generic (PLEG): container finished" podID="34a63f0b-9154-4762-b7af-d018f5a7d69a" containerID="64c0c38b4bd5bf4a3a16cf025f8cc0860572e829c7f231d05f68f05240fb936c" exitCode=0 Jan 23 08:50:58 crc kubenswrapper[4711]: I0123 08:50:58.848043 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c" event={"ID":"34a63f0b-9154-4762-b7af-d018f5a7d69a","Type":"ContainerDied","Data":"64c0c38b4bd5bf4a3a16cf025f8cc0860572e829c7f231d05f68f05240fb936c"} Jan 23 08:51:01 crc kubenswrapper[4711]: I0123 08:51:01.049847 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-l6szl"] Jan 23 08:51:01 crc kubenswrapper[4711]: I0123 08:51:01.061832 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-l6szl"] Jan 23 08:51:01 crc kubenswrapper[4711]: I0123 08:51:01.485790 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff3509f4-218f-4ca4-8f1a-4533c8b4ca49" path="/var/lib/kubelet/pods/ff3509f4-218f-4ca4-8f1a-4533c8b4ca49/volumes" Jan 23 08:51:01 crc kubenswrapper[4711]: I0123 08:51:01.895553 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:51:01 crc kubenswrapper[4711]: I0123 08:51:01.911019 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:51:02 crc kubenswrapper[4711]: I0123 08:51:02.889266 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.401940 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n" Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.408888 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c" Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.544878 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ksdgh\" (UniqueName: \"kubernetes.io/projected/34a63f0b-9154-4762-b7af-d018f5a7d69a-kube-api-access-ksdgh\") pod \"34a63f0b-9154-4762-b7af-d018f5a7d69a\" (UID: \"34a63f0b-9154-4762-b7af-d018f5a7d69a\") " Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.545300 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2916172d-3d37-431e-9311-1846770d5c02-config-data\") pod \"2916172d-3d37-431e-9311-1846770d5c02\" (UID: \"2916172d-3d37-431e-9311-1846770d5c02\") " Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.545328 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2916172d-3d37-431e-9311-1846770d5c02-scripts\") pod \"2916172d-3d37-431e-9311-1846770d5c02\" (UID: \"2916172d-3d37-431e-9311-1846770d5c02\") " Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.545813 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34a63f0b-9154-4762-b7af-d018f5a7d69a-config-data\") pod \"34a63f0b-9154-4762-b7af-d018f5a7d69a\" (UID: \"34a63f0b-9154-4762-b7af-d018f5a7d69a\") " Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.545991 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnxzw\" (UniqueName: \"kubernetes.io/projected/2916172d-3d37-431e-9311-1846770d5c02-kube-api-access-mnxzw\") pod \"2916172d-3d37-431e-9311-1846770d5c02\" (UID: \"2916172d-3d37-431e-9311-1846770d5c02\") " Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.546058 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34a63f0b-9154-4762-b7af-d018f5a7d69a-scripts\") pod \"34a63f0b-9154-4762-b7af-d018f5a7d69a\" (UID: \"34a63f0b-9154-4762-b7af-d018f5a7d69a\") " Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.549968 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2916172d-3d37-431e-9311-1846770d5c02-scripts" (OuterVolumeSpecName: "scripts") pod "2916172d-3d37-431e-9311-1846770d5c02" (UID: "2916172d-3d37-431e-9311-1846770d5c02"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.549993 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2916172d-3d37-431e-9311-1846770d5c02-kube-api-access-mnxzw" (OuterVolumeSpecName: "kube-api-access-mnxzw") pod "2916172d-3d37-431e-9311-1846770d5c02" (UID: "2916172d-3d37-431e-9311-1846770d5c02"). InnerVolumeSpecName "kube-api-access-mnxzw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.550074 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34a63f0b-9154-4762-b7af-d018f5a7d69a-scripts" (OuterVolumeSpecName: "scripts") pod "34a63f0b-9154-4762-b7af-d018f5a7d69a" (UID: "34a63f0b-9154-4762-b7af-d018f5a7d69a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.550412 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34a63f0b-9154-4762-b7af-d018f5a7d69a-kube-api-access-ksdgh" (OuterVolumeSpecName: "kube-api-access-ksdgh") pod "34a63f0b-9154-4762-b7af-d018f5a7d69a" (UID: "34a63f0b-9154-4762-b7af-d018f5a7d69a"). InnerVolumeSpecName "kube-api-access-ksdgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.569202 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34a63f0b-9154-4762-b7af-d018f5a7d69a-config-data" (OuterVolumeSpecName: "config-data") pod "34a63f0b-9154-4762-b7af-d018f5a7d69a" (UID: "34a63f0b-9154-4762-b7af-d018f5a7d69a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.570765 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2916172d-3d37-431e-9311-1846770d5c02-config-data" (OuterVolumeSpecName: "config-data") pod "2916172d-3d37-431e-9311-1846770d5c02" (UID: "2916172d-3d37-431e-9311-1846770d5c02"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.649090 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34a63f0b-9154-4762-b7af-d018f5a7d69a-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.649130 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnxzw\" (UniqueName: \"kubernetes.io/projected/2916172d-3d37-431e-9311-1846770d5c02-kube-api-access-mnxzw\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.649140 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34a63f0b-9154-4762-b7af-d018f5a7d69a-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.649174 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ksdgh\" (UniqueName: \"kubernetes.io/projected/34a63f0b-9154-4762-b7af-d018f5a7d69a-kube-api-access-ksdgh\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.649184 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2916172d-3d37-431e-9311-1846770d5c02-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.649192 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2916172d-3d37-431e-9311-1846770d5c02-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.935712 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" event={"ID":"d73adce3-6489-46cd-baca-6b518bfbe671","Type":"ContainerStarted","Data":"5e776a19b3d2f2608e90e4869cb24bcb3c454b63474d8286d258c480e74f47f9"} Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.936857 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.942404 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c" event={"ID":"34a63f0b-9154-4762-b7af-d018f5a7d69a","Type":"ContainerDied","Data":"c5b73be1898b293cc123ea6e321701daec76fb33c514174bc3d1cca44fc650cb"} Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.942438 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5b73be1898b293cc123ea6e321701daec76fb33c514174bc3d1cca44fc650cb" Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.942416 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c" Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.944639 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n" event={"ID":"2916172d-3d37-431e-9311-1846770d5c02","Type":"ContainerDied","Data":"280233e72f59f14c0ce8657ed2179b05007934d1156d7088e821f1cbff5d408a"} Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.944670 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="280233e72f59f14c0ce8657ed2179b05007934d1156d7088e821f1cbff5d408a" Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.944732 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n" Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.955400 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" podStartSLOduration=1.750573358 podStartE2EDuration="14.955383698s" podCreationTimestamp="2026-01-23 08:50:51 +0000 UTC" firstStartedPulling="2026-01-23 08:50:52.283949553 +0000 UTC m=+1837.856905921" lastFinishedPulling="2026-01-23 08:51:05.488759893 +0000 UTC m=+1851.061716261" observedRunningTime="2026-01-23 08:51:05.95420812 +0000 UTC m=+1851.527164498" watchObservedRunningTime="2026-01-23 08:51:05.955383698 +0000 UTC m=+1851.528340066" Jan 23 08:51:05 crc kubenswrapper[4711]: I0123 08:51:05.968077 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.508103 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:51:06 crc kubenswrapper[4711]: E0123 08:51:06.508702 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34a63f0b-9154-4762-b7af-d018f5a7d69a" containerName="nova-kuttl-cell0-conductor-db-sync" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.508714 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="34a63f0b-9154-4762-b7af-d018f5a7d69a" containerName="nova-kuttl-cell0-conductor-db-sync" Jan 23 08:51:06 crc kubenswrapper[4711]: E0123 08:51:06.508727 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2916172d-3d37-431e-9311-1846770d5c02" containerName="nova-kuttl-cell1-conductor-db-sync" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.508733 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="2916172d-3d37-431e-9311-1846770d5c02" containerName="nova-kuttl-cell1-conductor-db-sync" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.508869 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="2916172d-3d37-431e-9311-1846770d5c02" containerName="nova-kuttl-cell1-conductor-db-sync" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.508885 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="34a63f0b-9154-4762-b7af-d018f5a7d69a" containerName="nova-kuttl-cell0-conductor-db-sync" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.509375 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.511470 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-conductor-config-data" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.520277 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.616935 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.618157 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.620182 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-conductor-config-data" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.635302 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.666051 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa4c6531-526e-4046-bb9f-975b1bc2a361-config-data\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"fa4c6531-526e-4046-bb9f-975b1bc2a361\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.666121 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2jq8\" (UniqueName: \"kubernetes.io/projected/fa4c6531-526e-4046-bb9f-975b1bc2a361-kube-api-access-z2jq8\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"fa4c6531-526e-4046-bb9f-975b1bc2a361\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.767377 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bt5qv\" (UniqueName: \"kubernetes.io/projected/a91e016e-1671-4b7e-bd99-054776f3d2f8-kube-api-access-bt5qv\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"a91e016e-1671-4b7e-bd99-054776f3d2f8\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.767434 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa4c6531-526e-4046-bb9f-975b1bc2a361-config-data\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"fa4c6531-526e-4046-bb9f-975b1bc2a361\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.767456 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2jq8\" (UniqueName: \"kubernetes.io/projected/fa4c6531-526e-4046-bb9f-975b1bc2a361-kube-api-access-z2jq8\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"fa4c6531-526e-4046-bb9f-975b1bc2a361\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.768188 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a91e016e-1671-4b7e-bd99-054776f3d2f8-config-data\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"a91e016e-1671-4b7e-bd99-054776f3d2f8\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.781557 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa4c6531-526e-4046-bb9f-975b1bc2a361-config-data\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"fa4c6531-526e-4046-bb9f-975b1bc2a361\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.786960 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2jq8\" (UniqueName: 
\"kubernetes.io/projected/fa4c6531-526e-4046-bb9f-975b1bc2a361-kube-api-access-z2jq8\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"fa4c6531-526e-4046-bb9f-975b1bc2a361\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.870035 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bt5qv\" (UniqueName: \"kubernetes.io/projected/a91e016e-1671-4b7e-bd99-054776f3d2f8-kube-api-access-bt5qv\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"a91e016e-1671-4b7e-bd99-054776f3d2f8\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.870098 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a91e016e-1671-4b7e-bd99-054776f3d2f8-config-data\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"a91e016e-1671-4b7e-bd99-054776f3d2f8\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.874188 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a91e016e-1671-4b7e-bd99-054776f3d2f8-config-data\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"a91e016e-1671-4b7e-bd99-054776f3d2f8\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.874968 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.891384 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bt5qv\" (UniqueName: \"kubernetes.io/projected/a91e016e-1671-4b7e-bd99-054776f3d2f8-kube-api-access-bt5qv\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"a91e016e-1671-4b7e-bd99-054776f3d2f8\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:51:06 crc kubenswrapper[4711]: I0123 08:51:06.933592 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:51:07 crc kubenswrapper[4711]: I0123 08:51:07.206347 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:51:07 crc kubenswrapper[4711]: I0123 08:51:07.318707 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:51:07 crc kubenswrapper[4711]: I0123 08:51:07.474557 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:51:07 crc kubenswrapper[4711]: E0123 08:51:07.474905 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:51:07 crc kubenswrapper[4711]: I0123 08:51:07.962316 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" event={"ID":"fa4c6531-526e-4046-bb9f-975b1bc2a361","Type":"ContainerStarted","Data":"6d42d96f4e0e729017efcb0e41819e16c924ad9d34168d8036235d6ae0f73e92"} Jan 23 08:51:07 crc kubenswrapper[4711]: I0123 08:51:07.962715 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:51:07 crc kubenswrapper[4711]: I0123 08:51:07.962731 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" event={"ID":"fa4c6531-526e-4046-bb9f-975b1bc2a361","Type":"ContainerStarted","Data":"fbf58935499b2c60fd882cbf929ed536c95f12ec8800594d6b52e7c34c139230"} Jan 23 08:51:07 crc kubenswrapper[4711]: I0123 08:51:07.964215 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" event={"ID":"a91e016e-1671-4b7e-bd99-054776f3d2f8","Type":"ContainerStarted","Data":"5761f2d8d00fd3f00a0775d319f904d817d62b08ea0cfec0fcc06471a9106397"} Jan 23 08:51:07 crc kubenswrapper[4711]: I0123 08:51:07.964268 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" event={"ID":"a91e016e-1671-4b7e-bd99-054776f3d2f8","Type":"ContainerStarted","Data":"36fe418dd9f2ff93aaecce5b7aa860870dcce7894e56d6435438b89665412db6"} Jan 23 08:51:07 crc kubenswrapper[4711]: I0123 08:51:07.984764 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" podStartSLOduration=1.984744977 podStartE2EDuration="1.984744977s" podCreationTimestamp="2026-01-23 08:51:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:51:07.977995152 +0000 UTC m=+1853.550951560" watchObservedRunningTime="2026-01-23 08:51:07.984744977 +0000 UTC m=+1853.557701345" Jan 23 08:51:07 crc kubenswrapper[4711]: I0123 08:51:07.998434 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" podStartSLOduration=1.99829893 podStartE2EDuration="1.99829893s" podCreationTimestamp="2026-01-23 08:51:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:51:07.994044706 +0000 UTC m=+1853.567001084" watchObservedRunningTime="2026-01-23 08:51:07.99829893 +0000 UTC m=+1853.571255298" Jan 23 08:51:08 crc kubenswrapper[4711]: I0123 08:51:08.974430 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:51:16 crc kubenswrapper[4711]: I0123 08:51:16.908710 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:51:16 crc kubenswrapper[4711]: I0123 08:51:16.957323 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.336233 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6"] Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.337399 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.339186 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-manage-scripts" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.340393 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-manage-config-data" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.349599 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz"] Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.350825 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.362722 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6"] Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.373769 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz"] Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.461340 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68a17610-bf27-4760-a41f-f575d1de98ad-config-data\") pod \"nova-kuttl-cell1-cell-mapping-pstd6\" (UID: \"68a17610-bf27-4760-a41f-f575d1de98ad\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.461408 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kqjs\" (UniqueName: \"kubernetes.io/projected/4a43e07a-6038-4e84-8fb0-3163c706ebfd-kube-api-access-6kqjs\") pod \"nova-kuttl-cell1-host-discover-dw6xz\" (UID: \"4a43e07a-6038-4e84-8fb0-3163c706ebfd\") " pod="nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.461460 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hhvn\" (UniqueName: \"kubernetes.io/projected/68a17610-bf27-4760-a41f-f575d1de98ad-kube-api-access-4hhvn\") pod \"nova-kuttl-cell1-cell-mapping-pstd6\" (UID: \"68a17610-bf27-4760-a41f-f575d1de98ad\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.461547 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a43e07a-6038-4e84-8fb0-3163c706ebfd-scripts\") pod \"nova-kuttl-cell1-host-discover-dw6xz\" (UID: \"4a43e07a-6038-4e84-8fb0-3163c706ebfd\") " pod="nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.461594 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/68a17610-bf27-4760-a41f-f575d1de98ad-scripts\") pod \"nova-kuttl-cell1-cell-mapping-pstd6\" (UID: \"68a17610-bf27-4760-a41f-f575d1de98ad\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.461617 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a43e07a-6038-4e84-8fb0-3163c706ebfd-config-data\") pod \"nova-kuttl-cell1-host-discover-dw6xz\" (UID: \"4a43e07a-6038-4e84-8fb0-3163c706ebfd\") " pod="nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.562728 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a43e07a-6038-4e84-8fb0-3163c706ebfd-scripts\") pod \"nova-kuttl-cell1-host-discover-dw6xz\" (UID: \"4a43e07a-6038-4e84-8fb0-3163c706ebfd\") " pod="nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.562810 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"scripts\" (UniqueName: \"kubernetes.io/secret/68a17610-bf27-4760-a41f-f575d1de98ad-scripts\") pod \"nova-kuttl-cell1-cell-mapping-pstd6\" (UID: \"68a17610-bf27-4760-a41f-f575d1de98ad\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.562826 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a43e07a-6038-4e84-8fb0-3163c706ebfd-config-data\") pod \"nova-kuttl-cell1-host-discover-dw6xz\" (UID: \"4a43e07a-6038-4e84-8fb0-3163c706ebfd\") " pod="nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.562922 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68a17610-bf27-4760-a41f-f575d1de98ad-config-data\") pod \"nova-kuttl-cell1-cell-mapping-pstd6\" (UID: \"68a17610-bf27-4760-a41f-f575d1de98ad\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.562949 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kqjs\" (UniqueName: \"kubernetes.io/projected/4a43e07a-6038-4e84-8fb0-3163c706ebfd-kube-api-access-6kqjs\") pod \"nova-kuttl-cell1-host-discover-dw6xz\" (UID: \"4a43e07a-6038-4e84-8fb0-3163c706ebfd\") " pod="nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.562990 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hhvn\" (UniqueName: \"kubernetes.io/projected/68a17610-bf27-4760-a41f-f575d1de98ad-kube-api-access-4hhvn\") pod \"nova-kuttl-cell1-cell-mapping-pstd6\" (UID: \"68a17610-bf27-4760-a41f-f575d1de98ad\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.570043 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a43e07a-6038-4e84-8fb0-3163c706ebfd-config-data\") pod \"nova-kuttl-cell1-host-discover-dw6xz\" (UID: \"4a43e07a-6038-4e84-8fb0-3163c706ebfd\") " pod="nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.570555 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a43e07a-6038-4e84-8fb0-3163c706ebfd-scripts\") pod \"nova-kuttl-cell1-host-discover-dw6xz\" (UID: \"4a43e07a-6038-4e84-8fb0-3163c706ebfd\") " pod="nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.570774 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68a17610-bf27-4760-a41f-f575d1de98ad-config-data\") pod \"nova-kuttl-cell1-cell-mapping-pstd6\" (UID: \"68a17610-bf27-4760-a41f-f575d1de98ad\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.570868 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/68a17610-bf27-4760-a41f-f575d1de98ad-scripts\") pod \"nova-kuttl-cell1-cell-mapping-pstd6\" (UID: \"68a17610-bf27-4760-a41f-f575d1de98ad\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.585494 
4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kqjs\" (UniqueName: \"kubernetes.io/projected/4a43e07a-6038-4e84-8fb0-3163c706ebfd-kube-api-access-6kqjs\") pod \"nova-kuttl-cell1-host-discover-dw6xz\" (UID: \"4a43e07a-6038-4e84-8fb0-3163c706ebfd\") " pod="nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.587928 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hhvn\" (UniqueName: \"kubernetes.io/projected/68a17610-bf27-4760-a41f-f575d1de98ad-kube-api-access-4hhvn\") pod \"nova-kuttl-cell1-cell-mapping-pstd6\" (UID: \"68a17610-bf27-4760-a41f-f575d1de98ad\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.664562 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.675487 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.758553 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75"] Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.759759 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.762606 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-manage-config-data" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.764084 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-manage-scripts" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.769933 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75"] Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.868615 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/516a75f8-e3ec-4723-ab0e-e0f2656077e8-config-data\") pod \"nova-kuttl-cell0-cell-mapping-ggc75\" (UID: \"516a75f8-e3ec-4723-ab0e-e0f2656077e8\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.868991 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59s7d\" (UniqueName: \"kubernetes.io/projected/516a75f8-e3ec-4723-ab0e-e0f2656077e8-kube-api-access-59s7d\") pod \"nova-kuttl-cell0-cell-mapping-ggc75\" (UID: \"516a75f8-e3ec-4723-ab0e-e0f2656077e8\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.869015 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/516a75f8-e3ec-4723-ab0e-e0f2656077e8-scripts\") pod \"nova-kuttl-cell0-cell-mapping-ggc75\" (UID: \"516a75f8-e3ec-4723-ab0e-e0f2656077e8\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.878240 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 
08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.880798 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.882984 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-api-config-data" Jan 23 08:51:17 crc kubenswrapper[4711]: I0123 08:51:17.891438 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:17.972036 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/516a75f8-e3ec-4723-ab0e-e0f2656077e8-config-data\") pod \"nova-kuttl-cell0-cell-mapping-ggc75\" (UID: \"516a75f8-e3ec-4723-ab0e-e0f2656077e8\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:17.972121 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59s7d\" (UniqueName: \"kubernetes.io/projected/516a75f8-e3ec-4723-ab0e-e0f2656077e8-kube-api-access-59s7d\") pod \"nova-kuttl-cell0-cell-mapping-ggc75\" (UID: \"516a75f8-e3ec-4723-ab0e-e0f2656077e8\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:17.972141 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/516a75f8-e3ec-4723-ab0e-e0f2656077e8-scripts\") pod \"nova-kuttl-cell0-cell-mapping-ggc75\" (UID: \"516a75f8-e3ec-4723-ab0e-e0f2656077e8\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:17.972168 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frqk7\" (UniqueName: \"kubernetes.io/projected/f2f209cc-93ef-492d-930e-a1b9d1b8c2b1-kube-api-access-frqk7\") pod \"nova-kuttl-api-0\" (UID: \"f2f209cc-93ef-492d-930e-a1b9d1b8c2b1\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:17.972223 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2f209cc-93ef-492d-930e-a1b9d1b8c2b1-logs\") pod \"nova-kuttl-api-0\" (UID: \"f2f209cc-93ef-492d-930e-a1b9d1b8c2b1\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:17.972242 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2f209cc-93ef-492d-930e-a1b9d1b8c2b1-config-data\") pod \"nova-kuttl-api-0\" (UID: \"f2f209cc-93ef-492d-930e-a1b9d1b8c2b1\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:17.982071 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/516a75f8-e3ec-4723-ab0e-e0f2656077e8-scripts\") pod \"nova-kuttl-cell0-cell-mapping-ggc75\" (UID: \"516a75f8-e3ec-4723-ab0e-e0f2656077e8\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:17.982557 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/516a75f8-e3ec-4723-ab0e-e0f2656077e8-config-data\") pod 
\"nova-kuttl-cell0-cell-mapping-ggc75\" (UID: \"516a75f8-e3ec-4723-ab0e-e0f2656077e8\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:17.987772 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:17.988795 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:17.994209 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-scheduler-config-data" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.007898 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59s7d\" (UniqueName: \"kubernetes.io/projected/516a75f8-e3ec-4723-ab0e-e0f2656077e8-kube-api-access-59s7d\") pod \"nova-kuttl-cell0-cell-mapping-ggc75\" (UID: \"516a75f8-e3ec-4723-ab0e-e0f2656077e8\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.010351 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.022657 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.024797 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.034539 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-metadata-config-data" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.074195 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2f209cc-93ef-492d-930e-a1b9d1b8c2b1-logs\") pod \"nova-kuttl-api-0\" (UID: \"f2f209cc-93ef-492d-930e-a1b9d1b8c2b1\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.074239 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2f209cc-93ef-492d-930e-a1b9d1b8c2b1-config-data\") pod \"nova-kuttl-api-0\" (UID: \"f2f209cc-93ef-492d-930e-a1b9d1b8c2b1\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.074282 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69h7w\" (UniqueName: \"kubernetes.io/projected/844b0520-90a3-42ea-ae7e-d344dad65dc6-kube-api-access-69h7w\") pod \"nova-kuttl-metadata-0\" (UID: \"844b0520-90a3-42ea-ae7e-d344dad65dc6\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.074348 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/844b0520-90a3-42ea-ae7e-d344dad65dc6-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"844b0520-90a3-42ea-ae7e-d344dad65dc6\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.074373 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/1ccdca19-e7ab-4635-8b7b-72d4690b0632-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"1ccdca19-e7ab-4635-8b7b-72d4690b0632\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.074414 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/844b0520-90a3-42ea-ae7e-d344dad65dc6-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"844b0520-90a3-42ea-ae7e-d344dad65dc6\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.074447 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frqk7\" (UniqueName: \"kubernetes.io/projected/f2f209cc-93ef-492d-930e-a1b9d1b8c2b1-kube-api-access-frqk7\") pod \"nova-kuttl-api-0\" (UID: \"f2f209cc-93ef-492d-930e-a1b9d1b8c2b1\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.074477 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrvmp\" (UniqueName: \"kubernetes.io/projected/1ccdca19-e7ab-4635-8b7b-72d4690b0632-kube-api-access-vrvmp\") pod \"nova-kuttl-scheduler-0\" (UID: \"1ccdca19-e7ab-4635-8b7b-72d4690b0632\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.075084 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2f209cc-93ef-492d-930e-a1b9d1b8c2b1-logs\") pod \"nova-kuttl-api-0\" (UID: \"f2f209cc-93ef-492d-930e-a1b9d1b8c2b1\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.075671 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.075807 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.080451 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2f209cc-93ef-492d-930e-a1b9d1b8c2b1-config-data\") pod \"nova-kuttl-api-0\" (UID: \"f2f209cc-93ef-492d-930e-a1b9d1b8c2b1\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.102012 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frqk7\" (UniqueName: \"kubernetes.io/projected/f2f209cc-93ef-492d-930e-a1b9d1b8c2b1-kube-api-access-frqk7\") pod \"nova-kuttl-api-0\" (UID: \"f2f209cc-93ef-492d-930e-a1b9d1b8c2b1\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.176525 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69h7w\" (UniqueName: \"kubernetes.io/projected/844b0520-90a3-42ea-ae7e-d344dad65dc6-kube-api-access-69h7w\") pod \"nova-kuttl-metadata-0\" (UID: \"844b0520-90a3-42ea-ae7e-d344dad65dc6\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.176598 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/844b0520-90a3-42ea-ae7e-d344dad65dc6-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"844b0520-90a3-42ea-ae7e-d344dad65dc6\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.176622 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ccdca19-e7ab-4635-8b7b-72d4690b0632-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"1ccdca19-e7ab-4635-8b7b-72d4690b0632\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.176671 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/844b0520-90a3-42ea-ae7e-d344dad65dc6-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"844b0520-90a3-42ea-ae7e-d344dad65dc6\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.176715 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrvmp\" (UniqueName: \"kubernetes.io/projected/1ccdca19-e7ab-4635-8b7b-72d4690b0632-kube-api-access-vrvmp\") pod \"nova-kuttl-scheduler-0\" (UID: \"1ccdca19-e7ab-4635-8b7b-72d4690b0632\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.177642 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/844b0520-90a3-42ea-ae7e-d344dad65dc6-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"844b0520-90a3-42ea-ae7e-d344dad65dc6\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.181739 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/844b0520-90a3-42ea-ae7e-d344dad65dc6-config-data\") pod \"nova-kuttl-metadata-0\" (UID: 
\"844b0520-90a3-42ea-ae7e-d344dad65dc6\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.183201 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ccdca19-e7ab-4635-8b7b-72d4690b0632-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"1ccdca19-e7ab-4635-8b7b-72d4690b0632\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.197163 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrvmp\" (UniqueName: \"kubernetes.io/projected/1ccdca19-e7ab-4635-8b7b-72d4690b0632-kube-api-access-vrvmp\") pod \"nova-kuttl-scheduler-0\" (UID: \"1ccdca19-e7ab-4635-8b7b-72d4690b0632\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.197375 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69h7w\" (UniqueName: \"kubernetes.io/projected/844b0520-90a3-42ea-ae7e-d344dad65dc6-kube-api-access-69h7w\") pod \"nova-kuttl-metadata-0\" (UID: \"844b0520-90a3-42ea-ae7e-d344dad65dc6\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.210412 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.343269 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.365531 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:18 crc kubenswrapper[4711]: I0123 08:51:18.894179 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz"] Jan 23 08:51:19 crc kubenswrapper[4711]: I0123 08:51:19.071566 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz" event={"ID":"4a43e07a-6038-4e84-8fb0-3163c706ebfd","Type":"ContainerStarted","Data":"ab6c827f4e0bc082dedcf6170555890ccffb68a27f0a7ea3cfa3d9c66cc94b05"} Jan 23 08:51:19 crc kubenswrapper[4711]: I0123 08:51:19.105667 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6"] Jan 23 08:51:19 crc kubenswrapper[4711]: W0123 08:51:19.114595 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod68a17610_bf27_4760_a41f_f575d1de98ad.slice/crio-ea8eca940022cb8dfbbe73f71f71465d53c7bf1c6d7dbb16aeac38e45fb7ff2b WatchSource:0}: Error finding container ea8eca940022cb8dfbbe73f71f71465d53c7bf1c6d7dbb16aeac38e45fb7ff2b: Status 404 returned error can't find the container with id ea8eca940022cb8dfbbe73f71f71465d53c7bf1c6d7dbb16aeac38e45fb7ff2b Jan 23 08:51:19 crc kubenswrapper[4711]: I0123 08:51:19.116834 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75"] Jan 23 08:51:19 crc kubenswrapper[4711]: I0123 08:51:19.123442 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:51:19 crc kubenswrapper[4711]: W0123 08:51:19.124564 4711 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf2f209cc_93ef_492d_930e_a1b9d1b8c2b1.slice/crio-e7b051cd38b14155b077d7eaf16ec1daec9c9099fbf4e2849435240e27d8e02a WatchSource:0}: Error finding container e7b051cd38b14155b077d7eaf16ec1daec9c9099fbf4e2849435240e27d8e02a: Status 404 returned error can't find the container with id e7b051cd38b14155b077d7eaf16ec1daec9c9099fbf4e2849435240e27d8e02a Jan 23 08:51:19 crc kubenswrapper[4711]: I0123 08:51:19.150360 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:51:19 crc kubenswrapper[4711]: I0123 08:51:19.239463 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:51:19 crc kubenswrapper[4711]: W0123 08:51:19.257465 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ccdca19_e7ab_4635_8b7b_72d4690b0632.slice/crio-0bcad7ffbf495f794248565cd012dfc1240a102339f6eaf8894796b3eba230e1 WatchSource:0}: Error finding container 0bcad7ffbf495f794248565cd012dfc1240a102339f6eaf8894796b3eba230e1: Status 404 returned error can't find the container with id 0bcad7ffbf495f794248565cd012dfc1240a102339f6eaf8894796b3eba230e1 Jan 23 08:51:19 crc kubenswrapper[4711]: I0123 08:51:19.476120 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:51:19 crc kubenswrapper[4711]: E0123 08:51:19.476399 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:51:19 crc kubenswrapper[4711]: I0123 08:51:19.947214 4711 scope.go:117] "RemoveContainer" containerID="29498e8723bec53c07ceb121f585404a7d28138b00bc83f2c525b61a8a00336d" Jan 23 08:51:19 crc kubenswrapper[4711]: I0123 08:51:19.983362 4711 scope.go:117] "RemoveContainer" containerID="2857200d70b376c98dcc2c645688d4a72ff3a2fb1fb904b1ed49f2fed40b4b89" Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.010345 4711 scope.go:117] "RemoveContainer" containerID="894aa8f8bd6d4097ffc9342a899c14a16061645d7203fc874fecda0c019af7d3" Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.039992 4711 scope.go:117] "RemoveContainer" containerID="ed7b60ce9b29bee0a8312b2bc86772c7b05819903c8a4a5250a2ea2367f849d7" Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.084638 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz" event={"ID":"4a43e07a-6038-4e84-8fb0-3163c706ebfd","Type":"ContainerStarted","Data":"42e7d55005a5f09e8ffcd1080f65d5396d6781a3e9e14da0860f81bb5f68bdac"} Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.089074 4711 scope.go:117] "RemoveContainer" containerID="f08642169a892a4e6bf734388a28d02949d7af46e34652ec665560a0b10fd993" Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.089626 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"f2f209cc-93ef-492d-930e-a1b9d1b8c2b1","Type":"ContainerStarted","Data":"bfb7aa560de5f22de58b73af4e40c50e4af8d4362b84459eba53ca2c2926c4ee"} Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 
08:51:20.089667 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"f2f209cc-93ef-492d-930e-a1b9d1b8c2b1","Type":"ContainerStarted","Data":"2e1e88621e4465e36b4e7f01eb24539fa7a7642cb690af40151a893f1a8c2668"} Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.089679 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"f2f209cc-93ef-492d-930e-a1b9d1b8c2b1","Type":"ContainerStarted","Data":"e7b051cd38b14155b077d7eaf16ec1daec9c9099fbf4e2849435240e27d8e02a"} Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.096062 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6" event={"ID":"68a17610-bf27-4760-a41f-f575d1de98ad","Type":"ContainerStarted","Data":"0932d432ac510c8282e41859ec4057cc7efae1fe7b9f4dc95e818628e6c9934d"} Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.096098 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6" event={"ID":"68a17610-bf27-4760-a41f-f575d1de98ad","Type":"ContainerStarted","Data":"ea8eca940022cb8dfbbe73f71f71465d53c7bf1c6d7dbb16aeac38e45fb7ff2b"} Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.101394 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75" event={"ID":"516a75f8-e3ec-4723-ab0e-e0f2656077e8","Type":"ContainerStarted","Data":"4789abcb6a8aa4883a0288c88291cf6c65f97469836b55b3d1ebb336c642f1d4"} Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.101440 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75" event={"ID":"516a75f8-e3ec-4723-ab0e-e0f2656077e8","Type":"ContainerStarted","Data":"7ebe66e9ee9f889d9eef9a23a1600ec0162f016ea28799c422a677116c6b01a7"} Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.106005 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz" podStartSLOduration=3.105988875 podStartE2EDuration="3.105988875s" podCreationTimestamp="2026-01-23 08:51:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:51:20.099182718 +0000 UTC m=+1865.672139086" watchObservedRunningTime="2026-01-23 08:51:20.105988875 +0000 UTC m=+1865.678945243" Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.108830 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"844b0520-90a3-42ea-ae7e-d344dad65dc6","Type":"ContainerStarted","Data":"f6e90795344a4a0ae7ddd2f915247279e29584e8c9f9d142154b12fb44450bdf"} Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.108879 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"844b0520-90a3-42ea-ae7e-d344dad65dc6","Type":"ContainerStarted","Data":"b1ef276d84b2b4337d57ebbc5efef1812de1030f96f21fd97f6f0848096f7b20"} Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.108892 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"844b0520-90a3-42ea-ae7e-d344dad65dc6","Type":"ContainerStarted","Data":"f6d701b85ba715e80575ccbed75cfca365badeade757d546f353320b650d3646"} Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.117952 4711 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"1ccdca19-e7ab-4635-8b7b-72d4690b0632","Type":"ContainerStarted","Data":"3d3449ac3fbb59e9343f49b5aaa8254de9ce8699596bd1c9cd000ec08cb1b4ee"} Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.117997 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"1ccdca19-e7ab-4635-8b7b-72d4690b0632","Type":"ContainerStarted","Data":"0bcad7ffbf495f794248565cd012dfc1240a102339f6eaf8894796b3eba230e1"} Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.124000 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-api-0" podStartSLOduration=3.123985717 podStartE2EDuration="3.123985717s" podCreationTimestamp="2026-01-23 08:51:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:51:20.12292071 +0000 UTC m=+1865.695877088" watchObservedRunningTime="2026-01-23 08:51:20.123985717 +0000 UTC m=+1865.696942085" Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.146150 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6" podStartSLOduration=3.146135249 podStartE2EDuration="3.146135249s" podCreationTimestamp="2026-01-23 08:51:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:51:20.143214668 +0000 UTC m=+1865.716171036" watchObservedRunningTime="2026-01-23 08:51:20.146135249 +0000 UTC m=+1865.719091607" Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.156728 4711 scope.go:117] "RemoveContainer" containerID="3542ccc41defdb0e51d72c252143be1bc5895c650d6374de75785663334df6c9" Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.162772 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-metadata-0" podStartSLOduration=3.162749057 podStartE2EDuration="3.162749057s" podCreationTimestamp="2026-01-23 08:51:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:51:20.160750487 +0000 UTC m=+1865.733706865" watchObservedRunningTime="2026-01-23 08:51:20.162749057 +0000 UTC m=+1865.735705425" Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.205703 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podStartSLOduration=3.205682858 podStartE2EDuration="3.205682858s" podCreationTimestamp="2026-01-23 08:51:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:51:20.20125422 +0000 UTC m=+1865.774210588" watchObservedRunningTime="2026-01-23 08:51:20.205682858 +0000 UTC m=+1865.778639226" Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.210218 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75" podStartSLOduration=3.210202059 podStartE2EDuration="3.210202059s" podCreationTimestamp="2026-01-23 08:51:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:51:20.17881742 +0000 UTC m=+1865.751773798" watchObservedRunningTime="2026-01-23 08:51:20.210202059 
+0000 UTC m=+1865.783158427" Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.211961 4711 scope.go:117] "RemoveContainer" containerID="1b59c5a45bec5d7be1d5919c52c8fb4fa3ee4e5bbf942fc6e799cf8c0f8112e2" Jan 23 08:51:20 crc kubenswrapper[4711]: I0123 08:51:20.232030 4711 scope.go:117] "RemoveContainer" containerID="d459bb977e47533e5fad502adff1260883892a96c3f4c7c9f2af17c793d0f2ff" Jan 23 08:51:23 crc kubenswrapper[4711]: I0123 08:51:23.155807 4711 generic.go:334] "Generic (PLEG): container finished" podID="4a43e07a-6038-4e84-8fb0-3163c706ebfd" containerID="42e7d55005a5f09e8ffcd1080f65d5396d6781a3e9e14da0860f81bb5f68bdac" exitCode=255 Jan 23 08:51:23 crc kubenswrapper[4711]: I0123 08:51:23.155907 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz" event={"ID":"4a43e07a-6038-4e84-8fb0-3163c706ebfd","Type":"ContainerDied","Data":"42e7d55005a5f09e8ffcd1080f65d5396d6781a3e9e14da0860f81bb5f68bdac"} Jan 23 08:51:23 crc kubenswrapper[4711]: I0123 08:51:23.157746 4711 scope.go:117] "RemoveContainer" containerID="42e7d55005a5f09e8ffcd1080f65d5396d6781a3e9e14da0860f81bb5f68bdac" Jan 23 08:51:23 crc kubenswrapper[4711]: I0123 08:51:23.343774 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:23 crc kubenswrapper[4711]: I0123 08:51:23.366807 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:23 crc kubenswrapper[4711]: I0123 08:51:23.366948 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:26 crc kubenswrapper[4711]: I0123 08:51:26.182072 4711 generic.go:334] "Generic (PLEG): container finished" podID="68a17610-bf27-4760-a41f-f575d1de98ad" containerID="0932d432ac510c8282e41859ec4057cc7efae1fe7b9f4dc95e818628e6c9934d" exitCode=0 Jan 23 08:51:26 crc kubenswrapper[4711]: I0123 08:51:26.182155 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6" event={"ID":"68a17610-bf27-4760-a41f-f575d1de98ad","Type":"ContainerDied","Data":"0932d432ac510c8282e41859ec4057cc7efae1fe7b9f4dc95e818628e6c9934d"} Jan 23 08:51:26 crc kubenswrapper[4711]: I0123 08:51:26.184283 4711 generic.go:334] "Generic (PLEG): container finished" podID="516a75f8-e3ec-4723-ab0e-e0f2656077e8" containerID="4789abcb6a8aa4883a0288c88291cf6c65f97469836b55b3d1ebb336c642f1d4" exitCode=0 Jan 23 08:51:26 crc kubenswrapper[4711]: I0123 08:51:26.184351 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75" event={"ID":"516a75f8-e3ec-4723-ab0e-e0f2656077e8","Type":"ContainerDied","Data":"4789abcb6a8aa4883a0288c88291cf6c65f97469836b55b3d1ebb336c642f1d4"} Jan 23 08:51:26 crc kubenswrapper[4711]: I0123 08:51:26.187887 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz" event={"ID":"4a43e07a-6038-4e84-8fb0-3163c706ebfd","Type":"ContainerStarted","Data":"4edf8832aec27e995aa3ea0d4eabd1e49e7a51ca65c5b85bdef7502040b4e3a8"} Jan 23 08:51:27 crc kubenswrapper[4711]: I0123 08:51:27.574424 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6" Jan 23 08:51:27 crc kubenswrapper[4711]: I0123 08:51:27.595552 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75" Jan 23 08:51:27 crc kubenswrapper[4711]: I0123 08:51:27.653400 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59s7d\" (UniqueName: \"kubernetes.io/projected/516a75f8-e3ec-4723-ab0e-e0f2656077e8-kube-api-access-59s7d\") pod \"516a75f8-e3ec-4723-ab0e-e0f2656077e8\" (UID: \"516a75f8-e3ec-4723-ab0e-e0f2656077e8\") " Jan 23 08:51:27 crc kubenswrapper[4711]: I0123 08:51:27.653576 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/68a17610-bf27-4760-a41f-f575d1de98ad-scripts\") pod \"68a17610-bf27-4760-a41f-f575d1de98ad\" (UID: \"68a17610-bf27-4760-a41f-f575d1de98ad\") " Jan 23 08:51:27 crc kubenswrapper[4711]: I0123 08:51:27.653635 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/516a75f8-e3ec-4723-ab0e-e0f2656077e8-config-data\") pod \"516a75f8-e3ec-4723-ab0e-e0f2656077e8\" (UID: \"516a75f8-e3ec-4723-ab0e-e0f2656077e8\") " Jan 23 08:51:27 crc kubenswrapper[4711]: I0123 08:51:27.653789 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hhvn\" (UniqueName: \"kubernetes.io/projected/68a17610-bf27-4760-a41f-f575d1de98ad-kube-api-access-4hhvn\") pod \"68a17610-bf27-4760-a41f-f575d1de98ad\" (UID: \"68a17610-bf27-4760-a41f-f575d1de98ad\") " Jan 23 08:51:27 crc kubenswrapper[4711]: I0123 08:51:27.653884 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68a17610-bf27-4760-a41f-f575d1de98ad-config-data\") pod \"68a17610-bf27-4760-a41f-f575d1de98ad\" (UID: \"68a17610-bf27-4760-a41f-f575d1de98ad\") " Jan 23 08:51:27 crc kubenswrapper[4711]: I0123 08:51:27.653981 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/516a75f8-e3ec-4723-ab0e-e0f2656077e8-scripts\") pod \"516a75f8-e3ec-4723-ab0e-e0f2656077e8\" (UID: \"516a75f8-e3ec-4723-ab0e-e0f2656077e8\") " Jan 23 08:51:27 crc kubenswrapper[4711]: I0123 08:51:27.659080 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68a17610-bf27-4760-a41f-f575d1de98ad-kube-api-access-4hhvn" (OuterVolumeSpecName: "kube-api-access-4hhvn") pod "68a17610-bf27-4760-a41f-f575d1de98ad" (UID: "68a17610-bf27-4760-a41f-f575d1de98ad"). InnerVolumeSpecName "kube-api-access-4hhvn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:51:27 crc kubenswrapper[4711]: I0123 08:51:27.660372 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/516a75f8-e3ec-4723-ab0e-e0f2656077e8-kube-api-access-59s7d" (OuterVolumeSpecName: "kube-api-access-59s7d") pod "516a75f8-e3ec-4723-ab0e-e0f2656077e8" (UID: "516a75f8-e3ec-4723-ab0e-e0f2656077e8"). InnerVolumeSpecName "kube-api-access-59s7d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:51:27 crc kubenswrapper[4711]: I0123 08:51:27.660715 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/516a75f8-e3ec-4723-ab0e-e0f2656077e8-scripts" (OuterVolumeSpecName: "scripts") pod "516a75f8-e3ec-4723-ab0e-e0f2656077e8" (UID: "516a75f8-e3ec-4723-ab0e-e0f2656077e8"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:51:27 crc kubenswrapper[4711]: I0123 08:51:27.666663 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68a17610-bf27-4760-a41f-f575d1de98ad-scripts" (OuterVolumeSpecName: "scripts") pod "68a17610-bf27-4760-a41f-f575d1de98ad" (UID: "68a17610-bf27-4760-a41f-f575d1de98ad"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:51:27 crc kubenswrapper[4711]: I0123 08:51:27.677855 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/516a75f8-e3ec-4723-ab0e-e0f2656077e8-config-data" (OuterVolumeSpecName: "config-data") pod "516a75f8-e3ec-4723-ab0e-e0f2656077e8" (UID: "516a75f8-e3ec-4723-ab0e-e0f2656077e8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:51:27 crc kubenswrapper[4711]: I0123 08:51:27.692630 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68a17610-bf27-4760-a41f-f575d1de98ad-config-data" (OuterVolumeSpecName: "config-data") pod "68a17610-bf27-4760-a41f-f575d1de98ad" (UID: "68a17610-bf27-4760-a41f-f575d1de98ad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:51:27 crc kubenswrapper[4711]: I0123 08:51:27.756368 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hhvn\" (UniqueName: \"kubernetes.io/projected/68a17610-bf27-4760-a41f-f575d1de98ad-kube-api-access-4hhvn\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:27 crc kubenswrapper[4711]: I0123 08:51:27.756413 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68a17610-bf27-4760-a41f-f575d1de98ad-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:27 crc kubenswrapper[4711]: I0123 08:51:27.756424 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/516a75f8-e3ec-4723-ab0e-e0f2656077e8-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:27 crc kubenswrapper[4711]: I0123 08:51:27.756434 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59s7d\" (UniqueName: \"kubernetes.io/projected/516a75f8-e3ec-4723-ab0e-e0f2656077e8-kube-api-access-59s7d\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:27 crc kubenswrapper[4711]: I0123 08:51:27.756445 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/68a17610-bf27-4760-a41f-f575d1de98ad-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:27 crc kubenswrapper[4711]: I0123 08:51:27.756457 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/516a75f8-e3ec-4723-ab0e-e0f2656077e8-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:28 crc kubenswrapper[4711]: I0123 08:51:28.203696 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6" Jan 23 08:51:28 crc kubenswrapper[4711]: I0123 08:51:28.203626 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6" event={"ID":"68a17610-bf27-4760-a41f-f575d1de98ad","Type":"ContainerDied","Data":"ea8eca940022cb8dfbbe73f71f71465d53c7bf1c6d7dbb16aeac38e45fb7ff2b"} Jan 23 08:51:28 crc kubenswrapper[4711]: I0123 08:51:28.204251 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea8eca940022cb8dfbbe73f71f71465d53c7bf1c6d7dbb16aeac38e45fb7ff2b" Jan 23 08:51:28 crc kubenswrapper[4711]: I0123 08:51:28.205417 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75" event={"ID":"516a75f8-e3ec-4723-ab0e-e0f2656077e8","Type":"ContainerDied","Data":"7ebe66e9ee9f889d9eef9a23a1600ec0162f016ea28799c422a677116c6b01a7"} Jan 23 08:51:28 crc kubenswrapper[4711]: I0123 08:51:28.205453 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ebe66e9ee9f889d9eef9a23a1600ec0162f016ea28799c422a677116c6b01a7" Jan 23 08:51:28 crc kubenswrapper[4711]: I0123 08:51:28.205554 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75" Jan 23 08:51:28 crc kubenswrapper[4711]: I0123 08:51:28.211582 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:28 crc kubenswrapper[4711]: I0123 08:51:28.211616 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:28 crc kubenswrapper[4711]: I0123 08:51:28.343824 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:28 crc kubenswrapper[4711]: I0123 08:51:28.366866 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:28 crc kubenswrapper[4711]: I0123 08:51:28.366924 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:28 crc kubenswrapper[4711]: I0123 08:51:28.369192 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:28 crc kubenswrapper[4711]: I0123 08:51:28.418767 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:51:28 crc kubenswrapper[4711]: I0123 08:51:28.430811 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:51:28 crc kubenswrapper[4711]: I0123 08:51:28.470174 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:51:29 crc kubenswrapper[4711]: I0123 08:51:29.212020 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="f2f209cc-93ef-492d-930e-a1b9d1b8c2b1" containerName="nova-kuttl-api-log" containerID="cri-o://2e1e88621e4465e36b4e7f01eb24539fa7a7642cb690af40151a893f1a8c2668" gracePeriod=30 Jan 23 08:51:29 crc kubenswrapper[4711]: I0123 08:51:29.212150 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" 
podUID="f2f209cc-93ef-492d-930e-a1b9d1b8c2b1" containerName="nova-kuttl-api-api" containerID="cri-o://bfb7aa560de5f22de58b73af4e40c50e4af8d4362b84459eba53ca2c2926c4ee" gracePeriod=30 Jan 23 08:51:29 crc kubenswrapper[4711]: I0123 08:51:29.212169 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="844b0520-90a3-42ea-ae7e-d344dad65dc6" containerName="nova-kuttl-metadata-log" containerID="cri-o://b1ef276d84b2b4337d57ebbc5efef1812de1030f96f21fd97f6f0848096f7b20" gracePeriod=30 Jan 23 08:51:29 crc kubenswrapper[4711]: I0123 08:51:29.212279 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="844b0520-90a3-42ea-ae7e-d344dad65dc6" containerName="nova-kuttl-metadata-metadata" containerID="cri-o://f6e90795344a4a0ae7ddd2f915247279e29584e8c9f9d142154b12fb44450bdf" gracePeriod=30 Jan 23 08:51:29 crc kubenswrapper[4711]: I0123 08:51:29.218316 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="f2f209cc-93ef-492d-930e-a1b9d1b8c2b1" containerName="nova-kuttl-api-api" probeResult="failure" output="Get \"http://10.217.0.200:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:51:29 crc kubenswrapper[4711]: I0123 08:51:29.218329 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="f2f209cc-93ef-492d-930e-a1b9d1b8c2b1" containerName="nova-kuttl-api-log" probeResult="failure" output="Get \"http://10.217.0.200:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:51:29 crc kubenswrapper[4711]: I0123 08:51:29.220482 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="844b0520-90a3-42ea-ae7e-d344dad65dc6" containerName="nova-kuttl-metadata-metadata" probeResult="failure" output="Get \"http://10.217.0.202:8775/\": EOF" Jan 23 08:51:29 crc kubenswrapper[4711]: I0123 08:51:29.220684 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="844b0520-90a3-42ea-ae7e-d344dad65dc6" containerName="nova-kuttl-metadata-log" probeResult="failure" output="Get \"http://10.217.0.202:8775/\": EOF" Jan 23 08:51:29 crc kubenswrapper[4711]: I0123 08:51:29.243024 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:30 crc kubenswrapper[4711]: I0123 08:51:30.226509 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podUID="1ccdca19-e7ab-4635-8b7b-72d4690b0632" containerName="nova-kuttl-scheduler-scheduler" containerID="cri-o://3d3449ac3fbb59e9343f49b5aaa8254de9ce8699596bd1c9cd000ec08cb1b4ee" gracePeriod=30 Jan 23 08:51:31 crc kubenswrapper[4711]: I0123 08:51:31.239017 4711 generic.go:334] "Generic (PLEG): container finished" podID="4a43e07a-6038-4e84-8fb0-3163c706ebfd" containerID="4edf8832aec27e995aa3ea0d4eabd1e49e7a51ca65c5b85bdef7502040b4e3a8" exitCode=0 Jan 23 08:51:31 crc kubenswrapper[4711]: I0123 08:51:31.239134 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz" event={"ID":"4a43e07a-6038-4e84-8fb0-3163c706ebfd","Type":"ContainerDied","Data":"4edf8832aec27e995aa3ea0d4eabd1e49e7a51ca65c5b85bdef7502040b4e3a8"} Jan 23 08:51:31 crc kubenswrapper[4711]: I0123 
08:51:31.240021 4711 scope.go:117] "RemoveContainer" containerID="42e7d55005a5f09e8ffcd1080f65d5396d6781a3e9e14da0860f81bb5f68bdac" Jan 23 08:51:31 crc kubenswrapper[4711]: I0123 08:51:31.245757 4711 generic.go:334] "Generic (PLEG): container finished" podID="f2f209cc-93ef-492d-930e-a1b9d1b8c2b1" containerID="2e1e88621e4465e36b4e7f01eb24539fa7a7642cb690af40151a893f1a8c2668" exitCode=143 Jan 23 08:51:31 crc kubenswrapper[4711]: I0123 08:51:31.245821 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"f2f209cc-93ef-492d-930e-a1b9d1b8c2b1","Type":"ContainerDied","Data":"2e1e88621e4465e36b4e7f01eb24539fa7a7642cb690af40151a893f1a8c2668"} Jan 23 08:51:31 crc kubenswrapper[4711]: I0123 08:51:31.254368 4711 generic.go:334] "Generic (PLEG): container finished" podID="844b0520-90a3-42ea-ae7e-d344dad65dc6" containerID="b1ef276d84b2b4337d57ebbc5efef1812de1030f96f21fd97f6f0848096f7b20" exitCode=143 Jan 23 08:51:31 crc kubenswrapper[4711]: I0123 08:51:31.254441 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"844b0520-90a3-42ea-ae7e-d344dad65dc6","Type":"ContainerDied","Data":"b1ef276d84b2b4337d57ebbc5efef1812de1030f96f21fd97f6f0848096f7b20"} Jan 23 08:51:32 crc kubenswrapper[4711]: I0123 08:51:32.597587 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz" Jan 23 08:51:32 crc kubenswrapper[4711]: I0123 08:51:32.668620 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a43e07a-6038-4e84-8fb0-3163c706ebfd-config-data\") pod \"4a43e07a-6038-4e84-8fb0-3163c706ebfd\" (UID: \"4a43e07a-6038-4e84-8fb0-3163c706ebfd\") " Jan 23 08:51:32 crc kubenswrapper[4711]: I0123 08:51:32.669880 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6kqjs\" (UniqueName: \"kubernetes.io/projected/4a43e07a-6038-4e84-8fb0-3163c706ebfd-kube-api-access-6kqjs\") pod \"4a43e07a-6038-4e84-8fb0-3163c706ebfd\" (UID: \"4a43e07a-6038-4e84-8fb0-3163c706ebfd\") " Jan 23 08:51:32 crc kubenswrapper[4711]: I0123 08:51:32.669926 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a43e07a-6038-4e84-8fb0-3163c706ebfd-scripts\") pod \"4a43e07a-6038-4e84-8fb0-3163c706ebfd\" (UID: \"4a43e07a-6038-4e84-8fb0-3163c706ebfd\") " Jan 23 08:51:32 crc kubenswrapper[4711]: I0123 08:51:32.675697 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a43e07a-6038-4e84-8fb0-3163c706ebfd-scripts" (OuterVolumeSpecName: "scripts") pod "4a43e07a-6038-4e84-8fb0-3163c706ebfd" (UID: "4a43e07a-6038-4e84-8fb0-3163c706ebfd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:51:32 crc kubenswrapper[4711]: I0123 08:51:32.677401 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a43e07a-6038-4e84-8fb0-3163c706ebfd-kube-api-access-6kqjs" (OuterVolumeSpecName: "kube-api-access-6kqjs") pod "4a43e07a-6038-4e84-8fb0-3163c706ebfd" (UID: "4a43e07a-6038-4e84-8fb0-3163c706ebfd"). InnerVolumeSpecName "kube-api-access-6kqjs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:51:32 crc kubenswrapper[4711]: I0123 08:51:32.703675 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a43e07a-6038-4e84-8fb0-3163c706ebfd-config-data" (OuterVolumeSpecName: "config-data") pod "4a43e07a-6038-4e84-8fb0-3163c706ebfd" (UID: "4a43e07a-6038-4e84-8fb0-3163c706ebfd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:51:32 crc kubenswrapper[4711]: I0123 08:51:32.770868 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6kqjs\" (UniqueName: \"kubernetes.io/projected/4a43e07a-6038-4e84-8fb0-3163c706ebfd-kube-api-access-6kqjs\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:32 crc kubenswrapper[4711]: I0123 08:51:32.770974 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a43e07a-6038-4e84-8fb0-3163c706ebfd-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:32 crc kubenswrapper[4711]: I0123 08:51:32.770992 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a43e07a-6038-4e84-8fb0-3163c706ebfd-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:33 crc kubenswrapper[4711]: I0123 08:51:33.273719 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz" Jan 23 08:51:33 crc kubenswrapper[4711]: I0123 08:51:33.283003 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz" event={"ID":"4a43e07a-6038-4e84-8fb0-3163c706ebfd","Type":"ContainerDied","Data":"ab6c827f4e0bc082dedcf6170555890ccffb68a27f0a7ea3cfa3d9c66cc94b05"} Jan 23 08:51:33 crc kubenswrapper[4711]: I0123 08:51:33.283044 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab6c827f4e0bc082dedcf6170555890ccffb68a27f0a7ea3cfa3d9c66cc94b05" Jan 23 08:51:33 crc kubenswrapper[4711]: E0123 08:51:33.346313 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3d3449ac3fbb59e9343f49b5aaa8254de9ce8699596bd1c9cd000ec08cb1b4ee" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:51:33 crc kubenswrapper[4711]: E0123 08:51:33.347785 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3d3449ac3fbb59e9343f49b5aaa8254de9ce8699596bd1c9cd000ec08cb1b4ee" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:51:33 crc kubenswrapper[4711]: E0123 08:51:33.348805 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3d3449ac3fbb59e9343f49b5aaa8254de9ce8699596bd1c9cd000ec08cb1b4ee" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:51:33 crc kubenswrapper[4711]: E0123 08:51:33.348868 4711 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-scheduler-0" 
podUID="1ccdca19-e7ab-4635-8b7b-72d4690b0632" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:51:33 crc kubenswrapper[4711]: I0123 08:51:33.473475 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:51:33 crc kubenswrapper[4711]: E0123 08:51:33.473796 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:51:34 crc kubenswrapper[4711]: I0123 08:51:34.940973 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.007112 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ccdca19-e7ab-4635-8b7b-72d4690b0632-config-data\") pod \"1ccdca19-e7ab-4635-8b7b-72d4690b0632\" (UID: \"1ccdca19-e7ab-4635-8b7b-72d4690b0632\") " Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.007400 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vrvmp\" (UniqueName: \"kubernetes.io/projected/1ccdca19-e7ab-4635-8b7b-72d4690b0632-kube-api-access-vrvmp\") pod \"1ccdca19-e7ab-4635-8b7b-72d4690b0632\" (UID: \"1ccdca19-e7ab-4635-8b7b-72d4690b0632\") " Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.012471 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ccdca19-e7ab-4635-8b7b-72d4690b0632-kube-api-access-vrvmp" (OuterVolumeSpecName: "kube-api-access-vrvmp") pod "1ccdca19-e7ab-4635-8b7b-72d4690b0632" (UID: "1ccdca19-e7ab-4635-8b7b-72d4690b0632"). InnerVolumeSpecName "kube-api-access-vrvmp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.039267 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ccdca19-e7ab-4635-8b7b-72d4690b0632-config-data" (OuterVolumeSpecName: "config-data") pod "1ccdca19-e7ab-4635-8b7b-72d4690b0632" (UID: "1ccdca19-e7ab-4635-8b7b-72d4690b0632"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.104281 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.108196 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2f209cc-93ef-492d-930e-a1b9d1b8c2b1-logs\") pod \"f2f209cc-93ef-492d-930e-a1b9d1b8c2b1\" (UID: \"f2f209cc-93ef-492d-930e-a1b9d1b8c2b1\") " Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.108324 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2f209cc-93ef-492d-930e-a1b9d1b8c2b1-config-data\") pod \"f2f209cc-93ef-492d-930e-a1b9d1b8c2b1\" (UID: \"f2f209cc-93ef-492d-930e-a1b9d1b8c2b1\") " Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.108362 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-frqk7\" (UniqueName: \"kubernetes.io/projected/f2f209cc-93ef-492d-930e-a1b9d1b8c2b1-kube-api-access-frqk7\") pod \"f2f209cc-93ef-492d-930e-a1b9d1b8c2b1\" (UID: \"f2f209cc-93ef-492d-930e-a1b9d1b8c2b1\") " Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.108561 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vrvmp\" (UniqueName: \"kubernetes.io/projected/1ccdca19-e7ab-4635-8b7b-72d4690b0632-kube-api-access-vrvmp\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.108572 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ccdca19-e7ab-4635-8b7b-72d4690b0632-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.109134 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f2f209cc-93ef-492d-930e-a1b9d1b8c2b1-logs" (OuterVolumeSpecName: "logs") pod "f2f209cc-93ef-492d-930e-a1b9d1b8c2b1" (UID: "f2f209cc-93ef-492d-930e-a1b9d1b8c2b1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.111693 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2f209cc-93ef-492d-930e-a1b9d1b8c2b1-kube-api-access-frqk7" (OuterVolumeSpecName: "kube-api-access-frqk7") pod "f2f209cc-93ef-492d-930e-a1b9d1b8c2b1" (UID: "f2f209cc-93ef-492d-930e-a1b9d1b8c2b1"). InnerVolumeSpecName "kube-api-access-frqk7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.150768 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2f209cc-93ef-492d-930e-a1b9d1b8c2b1-config-data" (OuterVolumeSpecName: "config-data") pod "f2f209cc-93ef-492d-930e-a1b9d1b8c2b1" (UID: "f2f209cc-93ef-492d-930e-a1b9d1b8c2b1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.209585 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2f209cc-93ef-492d-930e-a1b9d1b8c2b1-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.209652 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-frqk7\" (UniqueName: \"kubernetes.io/projected/f2f209cc-93ef-492d-930e-a1b9d1b8c2b1-kube-api-access-frqk7\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.209668 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2f209cc-93ef-492d-930e-a1b9d1b8c2b1-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.298045 4711 generic.go:334] "Generic (PLEG): container finished" podID="f2f209cc-93ef-492d-930e-a1b9d1b8c2b1" containerID="bfb7aa560de5f22de58b73af4e40c50e4af8d4362b84459eba53ca2c2926c4ee" exitCode=0 Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.298086 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.298196 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"f2f209cc-93ef-492d-930e-a1b9d1b8c2b1","Type":"ContainerDied","Data":"bfb7aa560de5f22de58b73af4e40c50e4af8d4362b84459eba53ca2c2926c4ee"} Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.298241 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"f2f209cc-93ef-492d-930e-a1b9d1b8c2b1","Type":"ContainerDied","Data":"e7b051cd38b14155b077d7eaf16ec1daec9c9099fbf4e2849435240e27d8e02a"} Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.298260 4711 scope.go:117] "RemoveContainer" containerID="bfb7aa560de5f22de58b73af4e40c50e4af8d4362b84459eba53ca2c2926c4ee" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.301301 4711 generic.go:334] "Generic (PLEG): container finished" podID="1ccdca19-e7ab-4635-8b7b-72d4690b0632" containerID="3d3449ac3fbb59e9343f49b5aaa8254de9ce8699596bd1c9cd000ec08cb1b4ee" exitCode=0 Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.301374 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"1ccdca19-e7ab-4635-8b7b-72d4690b0632","Type":"ContainerDied","Data":"3d3449ac3fbb59e9343f49b5aaa8254de9ce8699596bd1c9cd000ec08cb1b4ee"} Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.301403 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"1ccdca19-e7ab-4635-8b7b-72d4690b0632","Type":"ContainerDied","Data":"0bcad7ffbf495f794248565cd012dfc1240a102339f6eaf8894796b3eba230e1"} Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.301453 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.320545 4711 scope.go:117] "RemoveContainer" containerID="2e1e88621e4465e36b4e7f01eb24539fa7a7642cb690af40151a893f1a8c2668" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.352075 4711 scope.go:117] "RemoveContainer" containerID="bfb7aa560de5f22de58b73af4e40c50e4af8d4362b84459eba53ca2c2926c4ee" Jan 23 08:51:35 crc kubenswrapper[4711]: E0123 08:51:35.356880 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfb7aa560de5f22de58b73af4e40c50e4af8d4362b84459eba53ca2c2926c4ee\": container with ID starting with bfb7aa560de5f22de58b73af4e40c50e4af8d4362b84459eba53ca2c2926c4ee not found: ID does not exist" containerID="bfb7aa560de5f22de58b73af4e40c50e4af8d4362b84459eba53ca2c2926c4ee" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.356940 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfb7aa560de5f22de58b73af4e40c50e4af8d4362b84459eba53ca2c2926c4ee"} err="failed to get container status \"bfb7aa560de5f22de58b73af4e40c50e4af8d4362b84459eba53ca2c2926c4ee\": rpc error: code = NotFound desc = could not find container \"bfb7aa560de5f22de58b73af4e40c50e4af8d4362b84459eba53ca2c2926c4ee\": container with ID starting with bfb7aa560de5f22de58b73af4e40c50e4af8d4362b84459eba53ca2c2926c4ee not found: ID does not exist" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.356974 4711 scope.go:117] "RemoveContainer" containerID="2e1e88621e4465e36b4e7f01eb24539fa7a7642cb690af40151a893f1a8c2668" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.357086 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:51:35 crc kubenswrapper[4711]: E0123 08:51:35.360315 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e1e88621e4465e36b4e7f01eb24539fa7a7642cb690af40151a893f1a8c2668\": container with ID starting with 2e1e88621e4465e36b4e7f01eb24539fa7a7642cb690af40151a893f1a8c2668 not found: ID does not exist" containerID="2e1e88621e4465e36b4e7f01eb24539fa7a7642cb690af40151a893f1a8c2668" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.360348 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e1e88621e4465e36b4e7f01eb24539fa7a7642cb690af40151a893f1a8c2668"} err="failed to get container status \"2e1e88621e4465e36b4e7f01eb24539fa7a7642cb690af40151a893f1a8c2668\": rpc error: code = NotFound desc = could not find container \"2e1e88621e4465e36b4e7f01eb24539fa7a7642cb690af40151a893f1a8c2668\": container with ID starting with 2e1e88621e4465e36b4e7f01eb24539fa7a7642cb690af40151a893f1a8c2668 not found: ID does not exist" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.360390 4711 scope.go:117] "RemoveContainer" containerID="3d3449ac3fbb59e9343f49b5aaa8254de9ce8699596bd1c9cd000ec08cb1b4ee" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.371598 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.376674 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.384179 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] 
Jan 23 08:51:35 crc kubenswrapper[4711]: E0123 08:51:35.384531 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2f209cc-93ef-492d-930e-a1b9d1b8c2b1" containerName="nova-kuttl-api-api" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.384550 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2f209cc-93ef-492d-930e-a1b9d1b8c2b1" containerName="nova-kuttl-api-api" Jan 23 08:51:35 crc kubenswrapper[4711]: E0123 08:51:35.384571 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2f209cc-93ef-492d-930e-a1b9d1b8c2b1" containerName="nova-kuttl-api-log" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.384578 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2f209cc-93ef-492d-930e-a1b9d1b8c2b1" containerName="nova-kuttl-api-log" Jan 23 08:51:35 crc kubenswrapper[4711]: E0123 08:51:35.384593 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68a17610-bf27-4760-a41f-f575d1de98ad" containerName="nova-manage" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.384601 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="68a17610-bf27-4760-a41f-f575d1de98ad" containerName="nova-manage" Jan 23 08:51:35 crc kubenswrapper[4711]: E0123 08:51:35.384608 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a43e07a-6038-4e84-8fb0-3163c706ebfd" containerName="nova-manage" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.384614 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a43e07a-6038-4e84-8fb0-3163c706ebfd" containerName="nova-manage" Jan 23 08:51:35 crc kubenswrapper[4711]: E0123 08:51:35.384622 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a43e07a-6038-4e84-8fb0-3163c706ebfd" containerName="nova-manage" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.384628 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a43e07a-6038-4e84-8fb0-3163c706ebfd" containerName="nova-manage" Jan 23 08:51:35 crc kubenswrapper[4711]: E0123 08:51:35.384634 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ccdca19-e7ab-4635-8b7b-72d4690b0632" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.384641 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ccdca19-e7ab-4635-8b7b-72d4690b0632" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:51:35 crc kubenswrapper[4711]: E0123 08:51:35.384655 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="516a75f8-e3ec-4723-ab0e-e0f2656077e8" containerName="nova-manage" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.384662 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="516a75f8-e3ec-4723-ab0e-e0f2656077e8" containerName="nova-manage" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.384809 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a43e07a-6038-4e84-8fb0-3163c706ebfd" containerName="nova-manage" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.384824 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2f209cc-93ef-492d-930e-a1b9d1b8c2b1" containerName="nova-kuttl-api-api" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.384832 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="516a75f8-e3ec-4723-ab0e-e0f2656077e8" containerName="nova-manage" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.384838 4711 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="1ccdca19-e7ab-4635-8b7b-72d4690b0632" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.384846 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="68a17610-bf27-4760-a41f-f575d1de98ad" containerName="nova-manage" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.384854 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a43e07a-6038-4e84-8fb0-3163c706ebfd" containerName="nova-manage" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.384870 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2f209cc-93ef-492d-930e-a1b9d1b8c2b1" containerName="nova-kuttl-api-log" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.385657 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.388949 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-api-config-data" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.393291 4711 scope.go:117] "RemoveContainer" containerID="3d3449ac3fbb59e9343f49b5aaa8254de9ce8699596bd1c9cd000ec08cb1b4ee" Jan 23 08:51:35 crc kubenswrapper[4711]: E0123 08:51:35.393753 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d3449ac3fbb59e9343f49b5aaa8254de9ce8699596bd1c9cd000ec08cb1b4ee\": container with ID starting with 3d3449ac3fbb59e9343f49b5aaa8254de9ce8699596bd1c9cd000ec08cb1b4ee not found: ID does not exist" containerID="3d3449ac3fbb59e9343f49b5aaa8254de9ce8699596bd1c9cd000ec08cb1b4ee" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.393799 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d3449ac3fbb59e9343f49b5aaa8254de9ce8699596bd1c9cd000ec08cb1b4ee"} err="failed to get container status \"3d3449ac3fbb59e9343f49b5aaa8254de9ce8699596bd1c9cd000ec08cb1b4ee\": rpc error: code = NotFound desc = could not find container \"3d3449ac3fbb59e9343f49b5aaa8254de9ce8699596bd1c9cd000ec08cb1b4ee\": container with ID starting with 3d3449ac3fbb59e9343f49b5aaa8254de9ce8699596bd1c9cd000ec08cb1b4ee not found: ID does not exist" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.395399 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.405059 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.406112 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.408563 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-scheduler-config-data" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.413266 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.420871 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.422041 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d04088f-e0e5-49e1-945d-9c44c5d06962-config-data\") pod \"nova-kuttl-api-0\" (UID: \"0d04088f-e0e5-49e1-945d-9c44c5d06962\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.422118 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rztr\" (UniqueName: \"kubernetes.io/projected/0d04088f-e0e5-49e1-945d-9c44c5d06962-kube-api-access-9rztr\") pod \"nova-kuttl-api-0\" (UID: \"0d04088f-e0e5-49e1-945d-9c44c5d06962\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.422146 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d04088f-e0e5-49e1-945d-9c44c5d06962-logs\") pod \"nova-kuttl-api-0\" (UID: \"0d04088f-e0e5-49e1-945d-9c44c5d06962\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.422168 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94hrh\" (UniqueName: \"kubernetes.io/projected/3ce6c1c7-7443-468e-9711-062a99175155-kube-api-access-94hrh\") pod \"nova-kuttl-scheduler-0\" (UID: \"3ce6c1c7-7443-468e-9711-062a99175155\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.422255 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ce6c1c7-7443-468e-9711-062a99175155-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"3ce6c1c7-7443-468e-9711-062a99175155\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.490372 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ccdca19-e7ab-4635-8b7b-72d4690b0632" path="/var/lib/kubelet/pods/1ccdca19-e7ab-4635-8b7b-72d4690b0632/volumes" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.491069 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2f209cc-93ef-492d-930e-a1b9d1b8c2b1" path="/var/lib/kubelet/pods/f2f209cc-93ef-492d-930e-a1b9d1b8c2b1/volumes" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.524342 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ce6c1c7-7443-468e-9711-062a99175155-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"3ce6c1c7-7443-468e-9711-062a99175155\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.524417 4711 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d04088f-e0e5-49e1-945d-9c44c5d06962-config-data\") pod \"nova-kuttl-api-0\" (UID: \"0d04088f-e0e5-49e1-945d-9c44c5d06962\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.524472 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rztr\" (UniqueName: \"kubernetes.io/projected/0d04088f-e0e5-49e1-945d-9c44c5d06962-kube-api-access-9rztr\") pod \"nova-kuttl-api-0\" (UID: \"0d04088f-e0e5-49e1-945d-9c44c5d06962\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.524500 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d04088f-e0e5-49e1-945d-9c44c5d06962-logs\") pod \"nova-kuttl-api-0\" (UID: \"0d04088f-e0e5-49e1-945d-9c44c5d06962\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.524608 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94hrh\" (UniqueName: \"kubernetes.io/projected/3ce6c1c7-7443-468e-9711-062a99175155-kube-api-access-94hrh\") pod \"nova-kuttl-scheduler-0\" (UID: \"3ce6c1c7-7443-468e-9711-062a99175155\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.526107 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d04088f-e0e5-49e1-945d-9c44c5d06962-logs\") pod \"nova-kuttl-api-0\" (UID: \"0d04088f-e0e5-49e1-945d-9c44c5d06962\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.529394 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ce6c1c7-7443-468e-9711-062a99175155-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"3ce6c1c7-7443-468e-9711-062a99175155\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.529960 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d04088f-e0e5-49e1-945d-9c44c5d06962-config-data\") pod \"nova-kuttl-api-0\" (UID: \"0d04088f-e0e5-49e1-945d-9c44c5d06962\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.545966 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rztr\" (UniqueName: \"kubernetes.io/projected/0d04088f-e0e5-49e1-945d-9c44c5d06962-kube-api-access-9rztr\") pod \"nova-kuttl-api-0\" (UID: \"0d04088f-e0e5-49e1-945d-9c44c5d06962\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.559655 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94hrh\" (UniqueName: \"kubernetes.io/projected/3ce6c1c7-7443-468e-9711-062a99175155-kube-api-access-94hrh\") pod \"nova-kuttl-scheduler-0\" (UID: \"3ce6c1c7-7443-468e-9711-062a99175155\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.704497 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.722831 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.901366 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.931143 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/844b0520-90a3-42ea-ae7e-d344dad65dc6-logs\") pod \"844b0520-90a3-42ea-ae7e-d344dad65dc6\" (UID: \"844b0520-90a3-42ea-ae7e-d344dad65dc6\") " Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.931190 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/844b0520-90a3-42ea-ae7e-d344dad65dc6-config-data\") pod \"844b0520-90a3-42ea-ae7e-d344dad65dc6\" (UID: \"844b0520-90a3-42ea-ae7e-d344dad65dc6\") " Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.931906 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-69h7w\" (UniqueName: \"kubernetes.io/projected/844b0520-90a3-42ea-ae7e-d344dad65dc6-kube-api-access-69h7w\") pod \"844b0520-90a3-42ea-ae7e-d344dad65dc6\" (UID: \"844b0520-90a3-42ea-ae7e-d344dad65dc6\") " Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.931954 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/844b0520-90a3-42ea-ae7e-d344dad65dc6-logs" (OuterVolumeSpecName: "logs") pod "844b0520-90a3-42ea-ae7e-d344dad65dc6" (UID: "844b0520-90a3-42ea-ae7e-d344dad65dc6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.933322 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/844b0520-90a3-42ea-ae7e-d344dad65dc6-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.937021 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/844b0520-90a3-42ea-ae7e-d344dad65dc6-kube-api-access-69h7w" (OuterVolumeSpecName: "kube-api-access-69h7w") pod "844b0520-90a3-42ea-ae7e-d344dad65dc6" (UID: "844b0520-90a3-42ea-ae7e-d344dad65dc6"). InnerVolumeSpecName "kube-api-access-69h7w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:51:35 crc kubenswrapper[4711]: I0123 08:51:35.951748 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/844b0520-90a3-42ea-ae7e-d344dad65dc6-config-data" (OuterVolumeSpecName: "config-data") pod "844b0520-90a3-42ea-ae7e-d344dad65dc6" (UID: "844b0520-90a3-42ea-ae7e-d344dad65dc6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.055408 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/844b0520-90a3-42ea-ae7e-d344dad65dc6-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.055439 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-69h7w\" (UniqueName: \"kubernetes.io/projected/844b0520-90a3-42ea-ae7e-d344dad65dc6-kube-api-access-69h7w\") on node \"crc\" DevicePath \"\"" Jan 23 08:51:36 crc kubenswrapper[4711]: W0123 08:51:36.150234 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d04088f_e0e5_49e1_945d_9c44c5d06962.slice/crio-35bb74534be674e83118f1c38bc473c26b3045790686497a7c1719a13f5eb276 WatchSource:0}: Error finding container 35bb74534be674e83118f1c38bc473c26b3045790686497a7c1719a13f5eb276: Status 404 returned error can't find the container with id 35bb74534be674e83118f1c38bc473c26b3045790686497a7c1719a13f5eb276 Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.157878 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:51:36 crc kubenswrapper[4711]: W0123 08:51:36.213313 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ce6c1c7_7443_468e_9711_062a99175155.slice/crio-5574d01c749342d5f99c3064d5669224d632ee160edf64905818ff7151e68577 WatchSource:0}: Error finding container 5574d01c749342d5f99c3064d5669224d632ee160edf64905818ff7151e68577: Status 404 returned error can't find the container with id 5574d01c749342d5f99c3064d5669224d632ee160edf64905818ff7151e68577 Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.214733 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.326726 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"0d04088f-e0e5-49e1-945d-9c44c5d06962","Type":"ContainerStarted","Data":"35bb74534be674e83118f1c38bc473c26b3045790686497a7c1719a13f5eb276"} Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.331419 4711 generic.go:334] "Generic (PLEG): container finished" podID="844b0520-90a3-42ea-ae7e-d344dad65dc6" containerID="f6e90795344a4a0ae7ddd2f915247279e29584e8c9f9d142154b12fb44450bdf" exitCode=0 Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.331475 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"844b0520-90a3-42ea-ae7e-d344dad65dc6","Type":"ContainerDied","Data":"f6e90795344a4a0ae7ddd2f915247279e29584e8c9f9d142154b12fb44450bdf"} Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.331516 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.331594 4711 scope.go:117] "RemoveContainer" containerID="f6e90795344a4a0ae7ddd2f915247279e29584e8c9f9d142154b12fb44450bdf" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.331578 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"844b0520-90a3-42ea-ae7e-d344dad65dc6","Type":"ContainerDied","Data":"f6d701b85ba715e80575ccbed75cfca365badeade757d546f353320b650d3646"} Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.332749 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"3ce6c1c7-7443-468e-9711-062a99175155","Type":"ContainerStarted","Data":"5574d01c749342d5f99c3064d5669224d632ee160edf64905818ff7151e68577"} Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.350345 4711 scope.go:117] "RemoveContainer" containerID="b1ef276d84b2b4337d57ebbc5efef1812de1030f96f21fd97f6f0848096f7b20" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.372597 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.379495 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.379867 4711 scope.go:117] "RemoveContainer" containerID="f6e90795344a4a0ae7ddd2f915247279e29584e8c9f9d142154b12fb44450bdf" Jan 23 08:51:36 crc kubenswrapper[4711]: E0123 08:51:36.391624 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6e90795344a4a0ae7ddd2f915247279e29584e8c9f9d142154b12fb44450bdf\": container with ID starting with f6e90795344a4a0ae7ddd2f915247279e29584e8c9f9d142154b12fb44450bdf not found: ID does not exist" containerID="f6e90795344a4a0ae7ddd2f915247279e29584e8c9f9d142154b12fb44450bdf" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.391680 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6e90795344a4a0ae7ddd2f915247279e29584e8c9f9d142154b12fb44450bdf"} err="failed to get container status \"f6e90795344a4a0ae7ddd2f915247279e29584e8c9f9d142154b12fb44450bdf\": rpc error: code = NotFound desc = could not find container \"f6e90795344a4a0ae7ddd2f915247279e29584e8c9f9d142154b12fb44450bdf\": container with ID starting with f6e90795344a4a0ae7ddd2f915247279e29584e8c9f9d142154b12fb44450bdf not found: ID does not exist" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.391714 4711 scope.go:117] "RemoveContainer" containerID="b1ef276d84b2b4337d57ebbc5efef1812de1030f96f21fd97f6f0848096f7b20" Jan 23 08:51:36 crc kubenswrapper[4711]: E0123 08:51:36.392067 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1ef276d84b2b4337d57ebbc5efef1812de1030f96f21fd97f6f0848096f7b20\": container with ID starting with b1ef276d84b2b4337d57ebbc5efef1812de1030f96f21fd97f6f0848096f7b20 not found: ID does not exist" containerID="b1ef276d84b2b4337d57ebbc5efef1812de1030f96f21fd97f6f0848096f7b20" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.392090 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1ef276d84b2b4337d57ebbc5efef1812de1030f96f21fd97f6f0848096f7b20"} err="failed to get container status 
\"b1ef276d84b2b4337d57ebbc5efef1812de1030f96f21fd97f6f0848096f7b20\": rpc error: code = NotFound desc = could not find container \"b1ef276d84b2b4337d57ebbc5efef1812de1030f96f21fd97f6f0848096f7b20\": container with ID starting with b1ef276d84b2b4337d57ebbc5efef1812de1030f96f21fd97f6f0848096f7b20 not found: ID does not exist" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.398447 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:51:36 crc kubenswrapper[4711]: E0123 08:51:36.398805 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="844b0520-90a3-42ea-ae7e-d344dad65dc6" containerName="nova-kuttl-metadata-metadata" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.398824 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="844b0520-90a3-42ea-ae7e-d344dad65dc6" containerName="nova-kuttl-metadata-metadata" Jan 23 08:51:36 crc kubenswrapper[4711]: E0123 08:51:36.398843 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="844b0520-90a3-42ea-ae7e-d344dad65dc6" containerName="nova-kuttl-metadata-log" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.398850 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="844b0520-90a3-42ea-ae7e-d344dad65dc6" containerName="nova-kuttl-metadata-log" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.399007 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="844b0520-90a3-42ea-ae7e-d344dad65dc6" containerName="nova-kuttl-metadata-log" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.399030 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="844b0520-90a3-42ea-ae7e-d344dad65dc6" containerName="nova-kuttl-metadata-metadata" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.399975 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.404851 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-metadata-config-data" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.431595 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.489915 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqjzb\" (UniqueName: \"kubernetes.io/projected/35f8100a-5b47-4709-8005-06fd54d0da7a-kube-api-access-lqjzb\") pod \"nova-kuttl-metadata-0\" (UID: \"35f8100a-5b47-4709-8005-06fd54d0da7a\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.490003 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35f8100a-5b47-4709-8005-06fd54d0da7a-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"35f8100a-5b47-4709-8005-06fd54d0da7a\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.490073 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35f8100a-5b47-4709-8005-06fd54d0da7a-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"35f8100a-5b47-4709-8005-06fd54d0da7a\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.591598 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqjzb\" (UniqueName: \"kubernetes.io/projected/35f8100a-5b47-4709-8005-06fd54d0da7a-kube-api-access-lqjzb\") pod \"nova-kuttl-metadata-0\" (UID: \"35f8100a-5b47-4709-8005-06fd54d0da7a\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.591656 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35f8100a-5b47-4709-8005-06fd54d0da7a-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"35f8100a-5b47-4709-8005-06fd54d0da7a\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.591684 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35f8100a-5b47-4709-8005-06fd54d0da7a-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"35f8100a-5b47-4709-8005-06fd54d0da7a\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.592031 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35f8100a-5b47-4709-8005-06fd54d0da7a-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"35f8100a-5b47-4709-8005-06fd54d0da7a\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.595535 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35f8100a-5b47-4709-8005-06fd54d0da7a-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"35f8100a-5b47-4709-8005-06fd54d0da7a\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.610059 4711 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqjzb\" (UniqueName: \"kubernetes.io/projected/35f8100a-5b47-4709-8005-06fd54d0da7a-kube-api-access-lqjzb\") pod \"nova-kuttl-metadata-0\" (UID: \"35f8100a-5b47-4709-8005-06fd54d0da7a\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:36 crc kubenswrapper[4711]: I0123 08:51:36.720890 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:37 crc kubenswrapper[4711]: I0123 08:51:37.177375 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:51:37 crc kubenswrapper[4711]: I0123 08:51:37.354264 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"3ce6c1c7-7443-468e-9711-062a99175155","Type":"ContainerStarted","Data":"0f67f5d0b9b6331861ac2e372b31a2b792b0fab8a04dc383eb166e7b399b52d8"} Jan 23 08:51:37 crc kubenswrapper[4711]: I0123 08:51:37.357272 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"0d04088f-e0e5-49e1-945d-9c44c5d06962","Type":"ContainerStarted","Data":"3e64981d3ee8ddd4ba68575cc8aa2902c0f238ba083f7b4b1cc9000abf8c7393"} Jan 23 08:51:37 crc kubenswrapper[4711]: I0123 08:51:37.357450 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"0d04088f-e0e5-49e1-945d-9c44c5d06962","Type":"ContainerStarted","Data":"b12503ab0ef1a36765d3a041338d95fdba4f0a0965475625c33c98460ba9e09b"} Jan 23 08:51:37 crc kubenswrapper[4711]: I0123 08:51:37.375894 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"35f8100a-5b47-4709-8005-06fd54d0da7a","Type":"ContainerStarted","Data":"c5c56d03951f53ca34b201c6906a311454c347203c237e9f6b1f1aac93ba09cf"} Jan 23 08:51:37 crc kubenswrapper[4711]: I0123 08:51:37.389123 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podStartSLOduration=2.389100163 podStartE2EDuration="2.389100163s" podCreationTimestamp="2026-01-23 08:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:51:37.368555469 +0000 UTC m=+1882.941511847" watchObservedRunningTime="2026-01-23 08:51:37.389100163 +0000 UTC m=+1882.962056531" Jan 23 08:51:37 crc kubenswrapper[4711]: I0123 08:51:37.392236 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-api-0" podStartSLOduration=2.392225429 podStartE2EDuration="2.392225429s" podCreationTimestamp="2026-01-23 08:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:51:37.386484779 +0000 UTC m=+1882.959441147" watchObservedRunningTime="2026-01-23 08:51:37.392225429 +0000 UTC m=+1882.965181797" Jan 23 08:51:37 crc kubenswrapper[4711]: I0123 08:51:37.485259 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="844b0520-90a3-42ea-ae7e-d344dad65dc6" path="/var/lib/kubelet/pods/844b0520-90a3-42ea-ae7e-d344dad65dc6/volumes" Jan 23 08:51:38 crc kubenswrapper[4711]: I0123 08:51:38.390205 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" 
event={"ID":"35f8100a-5b47-4709-8005-06fd54d0da7a","Type":"ContainerStarted","Data":"12bd411285265e07e16996600d0d81e4f293eb29274b04e702f2e3b54e271818"} Jan 23 08:51:38 crc kubenswrapper[4711]: I0123 08:51:38.390257 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"35f8100a-5b47-4709-8005-06fd54d0da7a","Type":"ContainerStarted","Data":"f98504a33d29c5a17d3bc5297b768bd8ce8df9a52756e317762477cec225dc07"} Jan 23 08:51:38 crc kubenswrapper[4711]: I0123 08:51:38.416972 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-metadata-0" podStartSLOduration=2.41695204 podStartE2EDuration="2.41695204s" podCreationTimestamp="2026-01-23 08:51:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:51:38.411194459 +0000 UTC m=+1883.984150837" watchObservedRunningTime="2026-01-23 08:51:38.41695204 +0000 UTC m=+1883.989908408" Jan 23 08:51:40 crc kubenswrapper[4711]: I0123 08:51:40.723581 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:41 crc kubenswrapper[4711]: I0123 08:51:41.723494 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:41 crc kubenswrapper[4711]: I0123 08:51:41.723572 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:45 crc kubenswrapper[4711]: I0123 08:51:45.704781 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:45 crc kubenswrapper[4711]: I0123 08:51:45.705087 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:45 crc kubenswrapper[4711]: I0123 08:51:45.723708 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:45 crc kubenswrapper[4711]: I0123 08:51:45.751352 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:46 crc kubenswrapper[4711]: I0123 08:51:46.476630 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:51:46 crc kubenswrapper[4711]: I0123 08:51:46.723383 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:46 crc kubenswrapper[4711]: I0123 08:51:46.723434 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:46 crc kubenswrapper[4711]: I0123 08:51:46.788805 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="0d04088f-e0e5-49e1-945d-9c44c5d06962" containerName="nova-kuttl-api-api" probeResult="failure" output="Get \"http://10.217.0.203:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:51:46 crc kubenswrapper[4711]: I0123 08:51:46.788869 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="0d04088f-e0e5-49e1-945d-9c44c5d06962" containerName="nova-kuttl-api-log" probeResult="failure" output="Get \"http://10.217.0.203:8774/\": 
context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:51:47 crc kubenswrapper[4711]: I0123 08:51:47.475254 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:51:47 crc kubenswrapper[4711]: E0123 08:51:47.475573 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:51:47 crc kubenswrapper[4711]: I0123 08:51:47.805795 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="35f8100a-5b47-4709-8005-06fd54d0da7a" containerName="nova-kuttl-metadata-metadata" probeResult="failure" output="Get \"http://10.217.0.205:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:51:47 crc kubenswrapper[4711]: I0123 08:51:47.805786 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="35f8100a-5b47-4709-8005-06fd54d0da7a" containerName="nova-kuttl-metadata-log" probeResult="failure" output="Get \"http://10.217.0.205:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:51:55 crc kubenswrapper[4711]: I0123 08:51:55.709113 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:55 crc kubenswrapper[4711]: I0123 08:51:55.709915 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:55 crc kubenswrapper[4711]: I0123 08:51:55.711570 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:55 crc kubenswrapper[4711]: I0123 08:51:55.712662 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:56 crc kubenswrapper[4711]: I0123 08:51:56.540174 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:56 crc kubenswrapper[4711]: I0123 08:51:56.543782 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:51:56 crc kubenswrapper[4711]: I0123 08:51:56.726705 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:56 crc kubenswrapper[4711]: I0123 08:51:56.740924 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:56 crc kubenswrapper[4711]: I0123 08:51:56.747192 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:51:57 crc kubenswrapper[4711]: I0123 08:51:57.557647 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:52:02 crc kubenswrapper[4711]: I0123 08:52:02.492326 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:52:02 crc 
kubenswrapper[4711]: E0123 08:52:02.492950 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:52:09 crc kubenswrapper[4711]: I0123 08:52:09.389727 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:52:09 crc kubenswrapper[4711]: I0123 08:52:09.390377 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" podUID="a91e016e-1671-4b7e-bd99-054776f3d2f8" containerName="nova-kuttl-cell0-conductor-conductor" containerID="cri-o://5761f2d8d00fd3f00a0775d319f904d817d62b08ea0cfec0fcc06471a9106397" gracePeriod=30 Jan 23 08:52:09 crc kubenswrapper[4711]: I0123 08:52:09.413981 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0"] Jan 23 08:52:09 crc kubenswrapper[4711]: I0123 08:52:09.414257 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" podUID="d73adce3-6489-46cd-baca-6b518bfbe671" containerName="nova-kuttl-cell1-compute-fake1-compute-compute" containerID="cri-o://5e776a19b3d2f2608e90e4869cb24bcb3c454b63474d8286d258c480e74f47f9" gracePeriod=30 Jan 23 08:52:09 crc kubenswrapper[4711]: I0123 08:52:09.543333 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:52:09 crc kubenswrapper[4711]: I0123 08:52:09.543631 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="0d04088f-e0e5-49e1-945d-9c44c5d06962" containerName="nova-kuttl-api-log" containerID="cri-o://b12503ab0ef1a36765d3a041338d95fdba4f0a0965475625c33c98460ba9e09b" gracePeriod=30 Jan 23 08:52:09 crc kubenswrapper[4711]: I0123 08:52:09.543925 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="0d04088f-e0e5-49e1-945d-9c44c5d06962" containerName="nova-kuttl-api-api" containerID="cri-o://3e64981d3ee8ddd4ba68575cc8aa2902c0f238ba083f7b4b1cc9000abf8c7393" gracePeriod=30 Jan 23 08:52:09 crc kubenswrapper[4711]: I0123 08:52:09.552573 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:52:09 crc kubenswrapper[4711]: I0123 08:52:09.560848 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podUID="3ce6c1c7-7443-468e-9711-062a99175155" containerName="nova-kuttl-scheduler-scheduler" containerID="cri-o://0f67f5d0b9b6331861ac2e372b31a2b792b0fab8a04dc383eb166e7b399b52d8" gracePeriod=30 Jan 23 08:52:09 crc kubenswrapper[4711]: I0123 08:52:09.644047 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:52:09 crc kubenswrapper[4711]: I0123 08:52:09.644277 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" podUID="fa4c6531-526e-4046-bb9f-975b1bc2a361" containerName="nova-kuttl-cell1-conductor-conductor" 
containerID="cri-o://6d42d96f4e0e729017efcb0e41819e16c924ad9d34168d8036235d6ae0f73e92" gracePeriod=30 Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.432158 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.538360 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa4c6531-526e-4046-bb9f-975b1bc2a361-config-data\") pod \"fa4c6531-526e-4046-bb9f-975b1bc2a361\" (UID: \"fa4c6531-526e-4046-bb9f-975b1bc2a361\") " Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.538439 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2jq8\" (UniqueName: \"kubernetes.io/projected/fa4c6531-526e-4046-bb9f-975b1bc2a361-kube-api-access-z2jq8\") pod \"fa4c6531-526e-4046-bb9f-975b1bc2a361\" (UID: \"fa4c6531-526e-4046-bb9f-975b1bc2a361\") " Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.544049 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa4c6531-526e-4046-bb9f-975b1bc2a361-kube-api-access-z2jq8" (OuterVolumeSpecName: "kube-api-access-z2jq8") pod "fa4c6531-526e-4046-bb9f-975b1bc2a361" (UID: "fa4c6531-526e-4046-bb9f-975b1bc2a361"). InnerVolumeSpecName "kube-api-access-z2jq8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.559735 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa4c6531-526e-4046-bb9f-975b1bc2a361-config-data" (OuterVolumeSpecName: "config-data") pod "fa4c6531-526e-4046-bb9f-975b1bc2a361" (UID: "fa4c6531-526e-4046-bb9f-975b1bc2a361"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.641894 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa4c6531-526e-4046-bb9f-975b1bc2a361-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.641916 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2jq8\" (UniqueName: \"kubernetes.io/projected/fa4c6531-526e-4046-bb9f-975b1bc2a361-kube-api-access-z2jq8\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.647028 4711 generic.go:334] "Generic (PLEG): container finished" podID="fa4c6531-526e-4046-bb9f-975b1bc2a361" containerID="6d42d96f4e0e729017efcb0e41819e16c924ad9d34168d8036235d6ae0f73e92" exitCode=0 Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.647083 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" event={"ID":"fa4c6531-526e-4046-bb9f-975b1bc2a361","Type":"ContainerDied","Data":"6d42d96f4e0e729017efcb0e41819e16c924ad9d34168d8036235d6ae0f73e92"} Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.647108 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" event={"ID":"fa4c6531-526e-4046-bb9f-975b1bc2a361","Type":"ContainerDied","Data":"fbf58935499b2c60fd882cbf929ed536c95f12ec8800594d6b52e7c34c139230"} Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.647125 4711 scope.go:117] "RemoveContainer" containerID="6d42d96f4e0e729017efcb0e41819e16c924ad9d34168d8036235d6ae0f73e92" Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.647214 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.652727 4711 generic.go:334] "Generic (PLEG): container finished" podID="0d04088f-e0e5-49e1-945d-9c44c5d06962" containerID="b12503ab0ef1a36765d3a041338d95fdba4f0a0965475625c33c98460ba9e09b" exitCode=143 Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.652805 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"0d04088f-e0e5-49e1-945d-9c44c5d06962","Type":"ContainerDied","Data":"b12503ab0ef1a36765d3a041338d95fdba4f0a0965475625c33c98460ba9e09b"} Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.691382 4711 scope.go:117] "RemoveContainer" containerID="6d42d96f4e0e729017efcb0e41819e16c924ad9d34168d8036235d6ae0f73e92" Jan 23 08:52:10 crc kubenswrapper[4711]: E0123 08:52:10.693971 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d42d96f4e0e729017efcb0e41819e16c924ad9d34168d8036235d6ae0f73e92\": container with ID starting with 6d42d96f4e0e729017efcb0e41819e16c924ad9d34168d8036235d6ae0f73e92 not found: ID does not exist" containerID="6d42d96f4e0e729017efcb0e41819e16c924ad9d34168d8036235d6ae0f73e92" Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.694015 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d42d96f4e0e729017efcb0e41819e16c924ad9d34168d8036235d6ae0f73e92"} err="failed to get container status \"6d42d96f4e0e729017efcb0e41819e16c924ad9d34168d8036235d6ae0f73e92\": rpc error: code = NotFound desc = could not find container \"6d42d96f4e0e729017efcb0e41819e16c924ad9d34168d8036235d6ae0f73e92\": container with ID starting with 6d42d96f4e0e729017efcb0e41819e16c924ad9d34168d8036235d6ae0f73e92 not found: ID does not exist" Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.717975 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:52:10 crc kubenswrapper[4711]: E0123 08:52:10.727433 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0f67f5d0b9b6331861ac2e372b31a2b792b0fab8a04dc383eb166e7b399b52d8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:52:10 crc kubenswrapper[4711]: E0123 08:52:10.728899 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0f67f5d0b9b6331861ac2e372b31a2b792b0fab8a04dc383eb166e7b399b52d8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:52:10 crc kubenswrapper[4711]: E0123 08:52:10.730777 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0f67f5d0b9b6331861ac2e372b31a2b792b0fab8a04dc383eb166e7b399b52d8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:52:10 crc kubenswrapper[4711]: E0123 08:52:10.734162 4711 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-scheduler-0" 
podUID="3ce6c1c7-7443-468e-9711-062a99175155" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.735547 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.757708 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:52:10 crc kubenswrapper[4711]: E0123 08:52:10.758129 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa4c6531-526e-4046-bb9f-975b1bc2a361" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.758151 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa4c6531-526e-4046-bb9f-975b1bc2a361" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.758339 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa4c6531-526e-4046-bb9f-975b1bc2a361" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.758975 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.760410 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-conductor-config-data" Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.764109 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.844821 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed0b0d47-c436-47b1-ad07-719de0986cb0-config-data\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"ed0b0d47-c436-47b1-ad07-719de0986cb0\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.844864 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gq7zg\" (UniqueName: \"kubernetes.io/projected/ed0b0d47-c436-47b1-ad07-719de0986cb0-kube-api-access-gq7zg\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"ed0b0d47-c436-47b1-ad07-719de0986cb0\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.945819 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed0b0d47-c436-47b1-ad07-719de0986cb0-config-data\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"ed0b0d47-c436-47b1-ad07-719de0986cb0\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.945878 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gq7zg\" (UniqueName: \"kubernetes.io/projected/ed0b0d47-c436-47b1-ad07-719de0986cb0-kube-api-access-gq7zg\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"ed0b0d47-c436-47b1-ad07-719de0986cb0\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.951532 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed0b0d47-c436-47b1-ad07-719de0986cb0-config-data\") pod 
\"nova-kuttl-cell1-conductor-0\" (UID: \"ed0b0d47-c436-47b1-ad07-719de0986cb0\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:52:10 crc kubenswrapper[4711]: I0123 08:52:10.962375 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gq7zg\" (UniqueName: \"kubernetes.io/projected/ed0b0d47-c436-47b1-ad07-719de0986cb0-kube-api-access-gq7zg\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"ed0b0d47-c436-47b1-ad07-719de0986cb0\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:52:11 crc kubenswrapper[4711]: I0123 08:52:11.078085 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:52:11 crc kubenswrapper[4711]: I0123 08:52:11.487790 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa4c6531-526e-4046-bb9f-975b1bc2a361" path="/var/lib/kubelet/pods/fa4c6531-526e-4046-bb9f-975b1bc2a361/volumes" Jan 23 08:52:11 crc kubenswrapper[4711]: I0123 08:52:11.509687 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:52:11 crc kubenswrapper[4711]: I0123 08:52:11.670500 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" event={"ID":"ed0b0d47-c436-47b1-ad07-719de0986cb0","Type":"ContainerStarted","Data":"7551d54fb2d595ab2cb06887df78e176ac75c0f247958381e44b75c5f9473915"} Jan 23 08:52:11 crc kubenswrapper[4711]: E0123 08:52:11.733225 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5e776a19b3d2f2608e90e4869cb24bcb3c454b63474d8286d258c480e74f47f9" cmd=["/usr/bin/pgrep","-r","DRST","nova-compute"] Jan 23 08:52:11 crc kubenswrapper[4711]: E0123 08:52:11.734601 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5e776a19b3d2f2608e90e4869cb24bcb3c454b63474d8286d258c480e74f47f9" cmd=["/usr/bin/pgrep","-r","DRST","nova-compute"] Jan 23 08:52:11 crc kubenswrapper[4711]: E0123 08:52:11.736024 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5e776a19b3d2f2608e90e4869cb24bcb3c454b63474d8286d258c480e74f47f9" cmd=["/usr/bin/pgrep","-r","DRST","nova-compute"] Jan 23 08:52:11 crc kubenswrapper[4711]: E0123 08:52:11.736068 4711 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" podUID="d73adce3-6489-46cd-baca-6b518bfbe671" containerName="nova-kuttl-cell1-compute-fake1-compute-compute" Jan 23 08:52:11 crc kubenswrapper[4711]: E0123 08:52:11.937241 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5761f2d8d00fd3f00a0775d319f904d817d62b08ea0cfec0fcc06471a9106397" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:52:11 crc kubenswrapper[4711]: E0123 08:52:11.938758 4711 
log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5761f2d8d00fd3f00a0775d319f904d817d62b08ea0cfec0fcc06471a9106397" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:52:11 crc kubenswrapper[4711]: E0123 08:52:11.940381 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5761f2d8d00fd3f00a0775d319f904d817d62b08ea0cfec0fcc06471a9106397" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:52:11 crc kubenswrapper[4711]: E0123 08:52:11.940421 4711 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" podUID="a91e016e-1671-4b7e-bd99-054776f3d2f8" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:52:12 crc kubenswrapper[4711]: I0123 08:52:12.687039 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" event={"ID":"ed0b0d47-c436-47b1-ad07-719de0986cb0","Type":"ContainerStarted","Data":"7c16e17c62978f75ae1e7474d4b1139142bff92e2f56ee17cb8e14dd0d169eb1"} Jan 23 08:52:12 crc kubenswrapper[4711]: I0123 08:52:12.687944 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:52:12 crc kubenswrapper[4711]: I0123 08:52:12.710754 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" podStartSLOduration=2.710737281 podStartE2EDuration="2.710737281s" podCreationTimestamp="2026-01-23 08:52:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:52:12.708737693 +0000 UTC m=+1918.281694061" watchObservedRunningTime="2026-01-23 08:52:12.710737281 +0000 UTC m=+1918.283693649" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.091601 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.183427 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d04088f-e0e5-49e1-945d-9c44c5d06962-config-data\") pod \"0d04088f-e0e5-49e1-945d-9c44c5d06962\" (UID: \"0d04088f-e0e5-49e1-945d-9c44c5d06962\") " Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.183594 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rztr\" (UniqueName: \"kubernetes.io/projected/0d04088f-e0e5-49e1-945d-9c44c5d06962-kube-api-access-9rztr\") pod \"0d04088f-e0e5-49e1-945d-9c44c5d06962\" (UID: \"0d04088f-e0e5-49e1-945d-9c44c5d06962\") " Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.183642 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d04088f-e0e5-49e1-945d-9c44c5d06962-logs\") pod \"0d04088f-e0e5-49e1-945d-9c44c5d06962\" (UID: \"0d04088f-e0e5-49e1-945d-9c44c5d06962\") " Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.184376 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d04088f-e0e5-49e1-945d-9c44c5d06962-logs" (OuterVolumeSpecName: "logs") pod "0d04088f-e0e5-49e1-945d-9c44c5d06962" (UID: "0d04088f-e0e5-49e1-945d-9c44c5d06962"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.194743 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d04088f-e0e5-49e1-945d-9c44c5d06962-kube-api-access-9rztr" (OuterVolumeSpecName: "kube-api-access-9rztr") pod "0d04088f-e0e5-49e1-945d-9c44c5d06962" (UID: "0d04088f-e0e5-49e1-945d-9c44c5d06962"). InnerVolumeSpecName "kube-api-access-9rztr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.206885 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d04088f-e0e5-49e1-945d-9c44c5d06962-config-data" (OuterVolumeSpecName: "config-data") pod "0d04088f-e0e5-49e1-945d-9c44c5d06962" (UID: "0d04088f-e0e5-49e1-945d-9c44c5d06962"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.284720 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rztr\" (UniqueName: \"kubernetes.io/projected/0d04088f-e0e5-49e1-945d-9c44c5d06962-kube-api-access-9rztr\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.284753 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d04088f-e0e5-49e1-945d-9c44c5d06962-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.284764 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d04088f-e0e5-49e1-945d-9c44c5d06962-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.697923 4711 generic.go:334] "Generic (PLEG): container finished" podID="0d04088f-e0e5-49e1-945d-9c44c5d06962" containerID="3e64981d3ee8ddd4ba68575cc8aa2902c0f238ba083f7b4b1cc9000abf8c7393" exitCode=0 Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.697988 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"0d04088f-e0e5-49e1-945d-9c44c5d06962","Type":"ContainerDied","Data":"3e64981d3ee8ddd4ba68575cc8aa2902c0f238ba083f7b4b1cc9000abf8c7393"} Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.698343 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"0d04088f-e0e5-49e1-945d-9c44c5d06962","Type":"ContainerDied","Data":"35bb74534be674e83118f1c38bc473c26b3045790686497a7c1719a13f5eb276"} Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.698053 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.698388 4711 scope.go:117] "RemoveContainer" containerID="3e64981d3ee8ddd4ba68575cc8aa2902c0f238ba083f7b4b1cc9000abf8c7393" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.721775 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.727054 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.735229 4711 scope.go:117] "RemoveContainer" containerID="b12503ab0ef1a36765d3a041338d95fdba4f0a0965475625c33c98460ba9e09b" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.754673 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:52:13 crc kubenswrapper[4711]: E0123 08:52:13.756429 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d04088f-e0e5-49e1-945d-9c44c5d06962" containerName="nova-kuttl-api-log" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.756458 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d04088f-e0e5-49e1-945d-9c44c5d06962" containerName="nova-kuttl-api-log" Jan 23 08:52:13 crc kubenswrapper[4711]: E0123 08:52:13.756490 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d04088f-e0e5-49e1-945d-9c44c5d06962" containerName="nova-kuttl-api-api" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.756497 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d04088f-e0e5-49e1-945d-9c44c5d06962" containerName="nova-kuttl-api-api" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.756701 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d04088f-e0e5-49e1-945d-9c44c5d06962" containerName="nova-kuttl-api-log" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.756718 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d04088f-e0e5-49e1-945d-9c44c5d06962" containerName="nova-kuttl-api-api" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.757709 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.759878 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-api-config-data" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.761300 4711 scope.go:117] "RemoveContainer" containerID="3e64981d3ee8ddd4ba68575cc8aa2902c0f238ba083f7b4b1cc9000abf8c7393" Jan 23 08:52:13 crc kubenswrapper[4711]: E0123 08:52:13.762012 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e64981d3ee8ddd4ba68575cc8aa2902c0f238ba083f7b4b1cc9000abf8c7393\": container with ID starting with 3e64981d3ee8ddd4ba68575cc8aa2902c0f238ba083f7b4b1cc9000abf8c7393 not found: ID does not exist" containerID="3e64981d3ee8ddd4ba68575cc8aa2902c0f238ba083f7b4b1cc9000abf8c7393" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.762043 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e64981d3ee8ddd4ba68575cc8aa2902c0f238ba083f7b4b1cc9000abf8c7393"} err="failed to get container status \"3e64981d3ee8ddd4ba68575cc8aa2902c0f238ba083f7b4b1cc9000abf8c7393\": rpc error: code = NotFound desc = could not find container \"3e64981d3ee8ddd4ba68575cc8aa2902c0f238ba083f7b4b1cc9000abf8c7393\": container with ID starting with 3e64981d3ee8ddd4ba68575cc8aa2902c0f238ba083f7b4b1cc9000abf8c7393 not found: ID does not exist" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.762065 4711 scope.go:117] "RemoveContainer" containerID="b12503ab0ef1a36765d3a041338d95fdba4f0a0965475625c33c98460ba9e09b" Jan 23 08:52:13 crc kubenswrapper[4711]: E0123 08:52:13.763798 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b12503ab0ef1a36765d3a041338d95fdba4f0a0965475625c33c98460ba9e09b\": container with ID starting with b12503ab0ef1a36765d3a041338d95fdba4f0a0965475625c33c98460ba9e09b not found: ID does not exist" containerID="b12503ab0ef1a36765d3a041338d95fdba4f0a0965475625c33c98460ba9e09b" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.763831 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b12503ab0ef1a36765d3a041338d95fdba4f0a0965475625c33c98460ba9e09b"} err="failed to get container status \"b12503ab0ef1a36765d3a041338d95fdba4f0a0965475625c33c98460ba9e09b\": rpc error: code = NotFound desc = could not find container \"b12503ab0ef1a36765d3a041338d95fdba4f0a0965475625c33c98460ba9e09b\": container with ID starting with b12503ab0ef1a36765d3a041338d95fdba4f0a0965475625c33c98460ba9e09b not found: ID does not exist" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.769960 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.893480 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8lpk\" (UniqueName: \"kubernetes.io/projected/d194640a-7e19-4627-8671-ebd9bb1eeaa5-kube-api-access-k8lpk\") pod \"nova-kuttl-api-0\" (UID: \"d194640a-7e19-4627-8671-ebd9bb1eeaa5\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.893713 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/d194640a-7e19-4627-8671-ebd9bb1eeaa5-config-data\") pod \"nova-kuttl-api-0\" (UID: \"d194640a-7e19-4627-8671-ebd9bb1eeaa5\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.893981 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d194640a-7e19-4627-8671-ebd9bb1eeaa5-logs\") pod \"nova-kuttl-api-0\" (UID: \"d194640a-7e19-4627-8671-ebd9bb1eeaa5\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.995649 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8lpk\" (UniqueName: \"kubernetes.io/projected/d194640a-7e19-4627-8671-ebd9bb1eeaa5-kube-api-access-k8lpk\") pod \"nova-kuttl-api-0\" (UID: \"d194640a-7e19-4627-8671-ebd9bb1eeaa5\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.996165 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d194640a-7e19-4627-8671-ebd9bb1eeaa5-config-data\") pod \"nova-kuttl-api-0\" (UID: \"d194640a-7e19-4627-8671-ebd9bb1eeaa5\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.997010 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d194640a-7e19-4627-8671-ebd9bb1eeaa5-logs\") pod \"nova-kuttl-api-0\" (UID: \"d194640a-7e19-4627-8671-ebd9bb1eeaa5\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:13 crc kubenswrapper[4711]: I0123 08:52:13.997361 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d194640a-7e19-4627-8671-ebd9bb1eeaa5-logs\") pod \"nova-kuttl-api-0\" (UID: \"d194640a-7e19-4627-8671-ebd9bb1eeaa5\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:14 crc kubenswrapper[4711]: I0123 08:52:14.010171 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d194640a-7e19-4627-8671-ebd9bb1eeaa5-config-data\") pod \"nova-kuttl-api-0\" (UID: \"d194640a-7e19-4627-8671-ebd9bb1eeaa5\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:14 crc kubenswrapper[4711]: I0123 08:52:14.015132 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8lpk\" (UniqueName: \"kubernetes.io/projected/d194640a-7e19-4627-8671-ebd9bb1eeaa5-kube-api-access-k8lpk\") pod \"nova-kuttl-api-0\" (UID: \"d194640a-7e19-4627-8671-ebd9bb1eeaa5\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:14 crc kubenswrapper[4711]: I0123 08:52:14.081443 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:14 crc kubenswrapper[4711]: I0123 08:52:14.514547 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:52:14 crc kubenswrapper[4711]: W0123 08:52:14.536628 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd194640a_7e19_4627_8671_ebd9bb1eeaa5.slice/crio-97ae47c32e5d284f6ed9294b9925526b55d8882853d45975aefd1a363a14a680 WatchSource:0}: Error finding container 97ae47c32e5d284f6ed9294b9925526b55d8882853d45975aefd1a363a14a680: Status 404 returned error can't find the container with id 97ae47c32e5d284f6ed9294b9925526b55d8882853d45975aefd1a363a14a680 Jan 23 08:52:14 crc kubenswrapper[4711]: I0123 08:52:14.710307 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"d194640a-7e19-4627-8671-ebd9bb1eeaa5","Type":"ContainerStarted","Data":"074d94d05775b76c50f474482b56a94bf8794148b41b844ae9116888639e7815"} Jan 23 08:52:14 crc kubenswrapper[4711]: I0123 08:52:14.710353 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"d194640a-7e19-4627-8671-ebd9bb1eeaa5","Type":"ContainerStarted","Data":"97ae47c32e5d284f6ed9294b9925526b55d8882853d45975aefd1a363a14a680"} Jan 23 08:52:14 crc kubenswrapper[4711]: I0123 08:52:14.714943 4711 generic.go:334] "Generic (PLEG): container finished" podID="3ce6c1c7-7443-468e-9711-062a99175155" containerID="0f67f5d0b9b6331861ac2e372b31a2b792b0fab8a04dc383eb166e7b399b52d8" exitCode=0 Jan 23 08:52:14 crc kubenswrapper[4711]: I0123 08:52:14.714992 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"3ce6c1c7-7443-468e-9711-062a99175155","Type":"ContainerDied","Data":"0f67f5d0b9b6331861ac2e372b31a2b792b0fab8a04dc383eb166e7b399b52d8"} Jan 23 08:52:14 crc kubenswrapper[4711]: I0123 08:52:14.799403 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:52:14 crc kubenswrapper[4711]: I0123 08:52:14.914121 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ce6c1c7-7443-468e-9711-062a99175155-config-data\") pod \"3ce6c1c7-7443-468e-9711-062a99175155\" (UID: \"3ce6c1c7-7443-468e-9711-062a99175155\") " Jan 23 08:52:14 crc kubenswrapper[4711]: I0123 08:52:14.914234 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94hrh\" (UniqueName: \"kubernetes.io/projected/3ce6c1c7-7443-468e-9711-062a99175155-kube-api-access-94hrh\") pod \"3ce6c1c7-7443-468e-9711-062a99175155\" (UID: \"3ce6c1c7-7443-468e-9711-062a99175155\") " Jan 23 08:52:14 crc kubenswrapper[4711]: I0123 08:52:14.917146 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ce6c1c7-7443-468e-9711-062a99175155-kube-api-access-94hrh" (OuterVolumeSpecName: "kube-api-access-94hrh") pod "3ce6c1c7-7443-468e-9711-062a99175155" (UID: "3ce6c1c7-7443-468e-9711-062a99175155"). InnerVolumeSpecName "kube-api-access-94hrh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:52:14 crc kubenswrapper[4711]: I0123 08:52:14.937252 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ce6c1c7-7443-468e-9711-062a99175155-config-data" (OuterVolumeSpecName: "config-data") pod "3ce6c1c7-7443-468e-9711-062a99175155" (UID: "3ce6c1c7-7443-468e-9711-062a99175155"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:52:15 crc kubenswrapper[4711]: I0123 08:52:15.016926 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ce6c1c7-7443-468e-9711-062a99175155-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:15 crc kubenswrapper[4711]: I0123 08:52:15.016975 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94hrh\" (UniqueName: \"kubernetes.io/projected/3ce6c1c7-7443-468e-9711-062a99175155-kube-api-access-94hrh\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:15 crc kubenswrapper[4711]: I0123 08:52:15.484089 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d04088f-e0e5-49e1-945d-9c44c5d06962" path="/var/lib/kubelet/pods/0d04088f-e0e5-49e1-945d-9c44c5d06962/volumes" Jan 23 08:52:15 crc kubenswrapper[4711]: I0123 08:52:15.724574 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"3ce6c1c7-7443-468e-9711-062a99175155","Type":"ContainerDied","Data":"5574d01c749342d5f99c3064d5669224d632ee160edf64905818ff7151e68577"} Jan 23 08:52:15 crc kubenswrapper[4711]: I0123 08:52:15.724622 4711 scope.go:117] "RemoveContainer" containerID="0f67f5d0b9b6331861ac2e372b31a2b792b0fab8a04dc383eb166e7b399b52d8" Jan 23 08:52:15 crc kubenswrapper[4711]: I0123 08:52:15.724768 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:52:15 crc kubenswrapper[4711]: I0123 08:52:15.730674 4711 generic.go:334] "Generic (PLEG): container finished" podID="a91e016e-1671-4b7e-bd99-054776f3d2f8" containerID="5761f2d8d00fd3f00a0775d319f904d817d62b08ea0cfec0fcc06471a9106397" exitCode=0 Jan 23 08:52:15 crc kubenswrapper[4711]: I0123 08:52:15.730734 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" event={"ID":"a91e016e-1671-4b7e-bd99-054776f3d2f8","Type":"ContainerDied","Data":"5761f2d8d00fd3f00a0775d319f904d817d62b08ea0cfec0fcc06471a9106397"} Jan 23 08:52:15 crc kubenswrapper[4711]: I0123 08:52:15.732320 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"d194640a-7e19-4627-8671-ebd9bb1eeaa5","Type":"ContainerStarted","Data":"aff96ca15a3679a9408c5c8fe00137e0a3440055a2e619dc8a346f5c1bb8398d"} Jan 23 08:52:15 crc kubenswrapper[4711]: I0123 08:52:15.752064 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-api-0" podStartSLOduration=2.752043028 podStartE2EDuration="2.752043028s" podCreationTimestamp="2026-01-23 08:52:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:52:15.747461276 +0000 UTC m=+1921.320417654" watchObservedRunningTime="2026-01-23 08:52:15.752043028 +0000 UTC m=+1921.324999396" Jan 23 08:52:15 crc kubenswrapper[4711]: I0123 08:52:15.772255 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:52:15 crc kubenswrapper[4711]: I0123 08:52:15.776675 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:52:15 crc kubenswrapper[4711]: I0123 08:52:15.785467 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:52:15 crc kubenswrapper[4711]: E0123 08:52:15.785823 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ce6c1c7-7443-468e-9711-062a99175155" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:52:15 crc kubenswrapper[4711]: I0123 08:52:15.785836 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ce6c1c7-7443-468e-9711-062a99175155" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:52:15 crc kubenswrapper[4711]: I0123 08:52:15.786000 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ce6c1c7-7443-468e-9711-062a99175155" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:52:15 crc kubenswrapper[4711]: I0123 08:52:15.786479 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:52:15 crc kubenswrapper[4711]: I0123 08:52:15.793311 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-scheduler-config-data" Jan 23 08:52:15 crc kubenswrapper[4711]: I0123 08:52:15.798822 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:52:15 crc kubenswrapper[4711]: I0123 08:52:15.876221 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:52:15 crc kubenswrapper[4711]: I0123 08:52:15.929552 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hc485\" (UniqueName: \"kubernetes.io/projected/d09491b5-bdb3-459c-a33b-0eae7f2b6f9c-kube-api-access-hc485\") pod \"nova-kuttl-scheduler-0\" (UID: \"d09491b5-bdb3-459c-a33b-0eae7f2b6f9c\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:52:15 crc kubenswrapper[4711]: I0123 08:52:15.929593 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d09491b5-bdb3-459c-a33b-0eae7f2b6f9c-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"d09491b5-bdb3-459c-a33b-0eae7f2b6f9c\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.031256 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bt5qv\" (UniqueName: \"kubernetes.io/projected/a91e016e-1671-4b7e-bd99-054776f3d2f8-kube-api-access-bt5qv\") pod \"a91e016e-1671-4b7e-bd99-054776f3d2f8\" (UID: \"a91e016e-1671-4b7e-bd99-054776f3d2f8\") " Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.031355 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a91e016e-1671-4b7e-bd99-054776f3d2f8-config-data\") pod \"a91e016e-1671-4b7e-bd99-054776f3d2f8\" (UID: \"a91e016e-1671-4b7e-bd99-054776f3d2f8\") " Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.031830 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hc485\" (UniqueName: \"kubernetes.io/projected/d09491b5-bdb3-459c-a33b-0eae7f2b6f9c-kube-api-access-hc485\") pod \"nova-kuttl-scheduler-0\" (UID: \"d09491b5-bdb3-459c-a33b-0eae7f2b6f9c\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.031927 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d09491b5-bdb3-459c-a33b-0eae7f2b6f9c-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"d09491b5-bdb3-459c-a33b-0eae7f2b6f9c\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.044875 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a91e016e-1671-4b7e-bd99-054776f3d2f8-kube-api-access-bt5qv" (OuterVolumeSpecName: "kube-api-access-bt5qv") pod "a91e016e-1671-4b7e-bd99-054776f3d2f8" (UID: "a91e016e-1671-4b7e-bd99-054776f3d2f8"). InnerVolumeSpecName "kube-api-access-bt5qv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.045761 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d09491b5-bdb3-459c-a33b-0eae7f2b6f9c-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"d09491b5-bdb3-459c-a33b-0eae7f2b6f9c\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.052176 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hc485\" (UniqueName: \"kubernetes.io/projected/d09491b5-bdb3-459c-a33b-0eae7f2b6f9c-kube-api-access-hc485\") pod \"nova-kuttl-scheduler-0\" (UID: \"d09491b5-bdb3-459c-a33b-0eae7f2b6f9c\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.068645 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a91e016e-1671-4b7e-bd99-054776f3d2f8-config-data" (OuterVolumeSpecName: "config-data") pod "a91e016e-1671-4b7e-bd99-054776f3d2f8" (UID: "a91e016e-1671-4b7e-bd99-054776f3d2f8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.113384 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.138026 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bt5qv\" (UniqueName: \"kubernetes.io/projected/a91e016e-1671-4b7e-bd99-054776f3d2f8-kube-api-access-bt5qv\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.138084 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a91e016e-1671-4b7e-bd99-054776f3d2f8-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.174698 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.473775 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:52:16 crc kubenswrapper[4711]: E0123 08:52:16.474291 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.601468 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:52:16 crc kubenswrapper[4711]: W0123 08:52:16.607560 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd09491b5_bdb3_459c_a33b_0eae7f2b6f9c.slice/crio-969c0c76ebcaf5e8ffdf2102e4b341fd0b09f9d7d00e239b124aa9a5561562c6 WatchSource:0}: Error finding container 969c0c76ebcaf5e8ffdf2102e4b341fd0b09f9d7d00e239b124aa9a5561562c6: Status 404 returned error can't find the container with id 969c0c76ebcaf5e8ffdf2102e4b341fd0b09f9d7d00e239b124aa9a5561562c6 Jan 23 08:52:16 crc kubenswrapper[4711]: E0123 08:52:16.735860 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5e776a19b3d2f2608e90e4869cb24bcb3c454b63474d8286d258c480e74f47f9" cmd=["/usr/bin/pgrep","-r","DRST","nova-compute"] Jan 23 08:52:16 crc kubenswrapper[4711]: E0123 08:52:16.741455 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5e776a19b3d2f2608e90e4869cb24bcb3c454b63474d8286d258c480e74f47f9" cmd=["/usr/bin/pgrep","-r","DRST","nova-compute"] Jan 23 08:52:16 crc kubenswrapper[4711]: E0123 08:52:16.743181 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5e776a19b3d2f2608e90e4869cb24bcb3c454b63474d8286d258c480e74f47f9" cmd=["/usr/bin/pgrep","-r","DRST","nova-compute"] Jan 23 08:52:16 crc kubenswrapper[4711]: E0123 08:52:16.743224 4711 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" podUID="d73adce3-6489-46cd-baca-6b518bfbe671" containerName="nova-kuttl-cell1-compute-fake1-compute-compute" Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.746691 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" event={"ID":"a91e016e-1671-4b7e-bd99-054776f3d2f8","Type":"ContainerDied","Data":"36fe418dd9f2ff93aaecce5b7aa860870dcce7894e56d6435438b89665412db6"} Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.746750 4711 scope.go:117] "RemoveContainer" 
containerID="5761f2d8d00fd3f00a0775d319f904d817d62b08ea0cfec0fcc06471a9106397" Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.746702 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.749988 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"d09491b5-bdb3-459c-a33b-0eae7f2b6f9c","Type":"ContainerStarted","Data":"969c0c76ebcaf5e8ffdf2102e4b341fd0b09f9d7d00e239b124aa9a5561562c6"} Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.786858 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.798610 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.812494 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:52:16 crc kubenswrapper[4711]: E0123 08:52:16.812851 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a91e016e-1671-4b7e-bd99-054776f3d2f8" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.812869 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a91e016e-1671-4b7e-bd99-054776f3d2f8" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.813013 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="a91e016e-1671-4b7e-bd99-054776f3d2f8" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.813491 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.813598 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.844278 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-conductor-config-data" Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.952466 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rf9hn\" (UniqueName: \"kubernetes.io/projected/03a26174-c17e-4e64-a30d-e1ddccee512c-kube-api-access-rf9hn\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"03a26174-c17e-4e64-a30d-e1ddccee512c\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:52:16 crc kubenswrapper[4711]: I0123 08:52:16.952954 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03a26174-c17e-4e64-a30d-e1ddccee512c-config-data\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"03a26174-c17e-4e64-a30d-e1ddccee512c\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:52:17 crc kubenswrapper[4711]: I0123 08:52:17.054265 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03a26174-c17e-4e64-a30d-e1ddccee512c-config-data\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"03a26174-c17e-4e64-a30d-e1ddccee512c\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:52:17 crc kubenswrapper[4711]: I0123 08:52:17.054354 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rf9hn\" (UniqueName: \"kubernetes.io/projected/03a26174-c17e-4e64-a30d-e1ddccee512c-kube-api-access-rf9hn\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"03a26174-c17e-4e64-a30d-e1ddccee512c\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:52:17 crc kubenswrapper[4711]: I0123 08:52:17.059938 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03a26174-c17e-4e64-a30d-e1ddccee512c-config-data\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"03a26174-c17e-4e64-a30d-e1ddccee512c\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:52:17 crc kubenswrapper[4711]: I0123 08:52:17.070679 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rf9hn\" (UniqueName: \"kubernetes.io/projected/03a26174-c17e-4e64-a30d-e1ddccee512c-kube-api-access-rf9hn\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"03a26174-c17e-4e64-a30d-e1ddccee512c\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:52:17 crc kubenswrapper[4711]: I0123 08:52:17.175919 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:52:17 crc kubenswrapper[4711]: I0123 08:52:17.489434 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ce6c1c7-7443-468e-9711-062a99175155" path="/var/lib/kubelet/pods/3ce6c1c7-7443-468e-9711-062a99175155/volumes" Jan 23 08:52:17 crc kubenswrapper[4711]: I0123 08:52:17.489972 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a91e016e-1671-4b7e-bd99-054776f3d2f8" path="/var/lib/kubelet/pods/a91e016e-1671-4b7e-bd99-054776f3d2f8/volumes" Jan 23 08:52:17 crc kubenswrapper[4711]: I0123 08:52:17.609712 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:52:17 crc kubenswrapper[4711]: I0123 08:52:17.761471 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" event={"ID":"03a26174-c17e-4e64-a30d-e1ddccee512c","Type":"ContainerStarted","Data":"3df6015660c605ea31e1627b9013c21ea0175561f10e31f5d8e498b759317ae7"} Jan 23 08:52:17 crc kubenswrapper[4711]: I0123 08:52:17.764137 4711 generic.go:334] "Generic (PLEG): container finished" podID="d73adce3-6489-46cd-baca-6b518bfbe671" containerID="5e776a19b3d2f2608e90e4869cb24bcb3c454b63474d8286d258c480e74f47f9" exitCode=0 Jan 23 08:52:17 crc kubenswrapper[4711]: I0123 08:52:17.764202 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" event={"ID":"d73adce3-6489-46cd-baca-6b518bfbe671","Type":"ContainerDied","Data":"5e776a19b3d2f2608e90e4869cb24bcb3c454b63474d8286d258c480e74f47f9"} Jan 23 08:52:17 crc kubenswrapper[4711]: I0123 08:52:17.766774 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"d09491b5-bdb3-459c-a33b-0eae7f2b6f9c","Type":"ContainerStarted","Data":"1242a681f3dbcaf5350cde7b252dddd25360bfa31b2cd77fc07a6680a337d459"} Jan 23 08:52:17 crc kubenswrapper[4711]: I0123 08:52:17.789635 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podStartSLOduration=2.789616887 podStartE2EDuration="2.789616887s" podCreationTimestamp="2026-01-23 08:52:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:52:17.786984214 +0000 UTC m=+1923.359940582" watchObservedRunningTime="2026-01-23 08:52:17.789616887 +0000 UTC m=+1923.362573265" Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.117098 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.272101 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d73adce3-6489-46cd-baca-6b518bfbe671-config-data\") pod \"d73adce3-6489-46cd-baca-6b518bfbe671\" (UID: \"d73adce3-6489-46cd-baca-6b518bfbe671\") " Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.272302 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ksdld\" (UniqueName: \"kubernetes.io/projected/d73adce3-6489-46cd-baca-6b518bfbe671-kube-api-access-ksdld\") pod \"d73adce3-6489-46cd-baca-6b518bfbe671\" (UID: \"d73adce3-6489-46cd-baca-6b518bfbe671\") " Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.283792 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d73adce3-6489-46cd-baca-6b518bfbe671-kube-api-access-ksdld" (OuterVolumeSpecName: "kube-api-access-ksdld") pod "d73adce3-6489-46cd-baca-6b518bfbe671" (UID: "d73adce3-6489-46cd-baca-6b518bfbe671"). InnerVolumeSpecName "kube-api-access-ksdld". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.307640 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d73adce3-6489-46cd-baca-6b518bfbe671-config-data" (OuterVolumeSpecName: "config-data") pod "d73adce3-6489-46cd-baca-6b518bfbe671" (UID: "d73adce3-6489-46cd-baca-6b518bfbe671"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.374313 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ksdld\" (UniqueName: \"kubernetes.io/projected/d73adce3-6489-46cd-baca-6b518bfbe671-kube-api-access-ksdld\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.374544 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d73adce3-6489-46cd-baca-6b518bfbe671-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.784004 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" event={"ID":"03a26174-c17e-4e64-a30d-e1ddccee512c","Type":"ContainerStarted","Data":"b03772d4f000db00a31fd4cd0d74fd26c7b67ab6ac5eecd32b0b2e528892b58d"} Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.784150 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.786375 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" event={"ID":"d73adce3-6489-46cd-baca-6b518bfbe671","Type":"ContainerDied","Data":"5ce360d7848fb09fc249734c15ff05305e8808872463cb5dc43abe7986f111a8"} Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.786706 4711 scope.go:117] "RemoveContainer" containerID="5e776a19b3d2f2608e90e4869cb24bcb3c454b63474d8286d258c480e74f47f9" Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.786447 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.809218 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" podStartSLOduration=2.809198753 podStartE2EDuration="2.809198753s" podCreationTimestamp="2026-01-23 08:52:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:52:18.806781273 +0000 UTC m=+1924.379737641" watchObservedRunningTime="2026-01-23 08:52:18.809198753 +0000 UTC m=+1924.382155111" Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.827559 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0"] Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.839606 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0"] Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.850392 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0"] Jan 23 08:52:18 crc kubenswrapper[4711]: E0123 08:52:18.850821 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d73adce3-6489-46cd-baca-6b518bfbe671" containerName="nova-kuttl-cell1-compute-fake1-compute-compute" Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.850837 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d73adce3-6489-46cd-baca-6b518bfbe671" containerName="nova-kuttl-cell1-compute-fake1-compute-compute" Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.850976 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d73adce3-6489-46cd-baca-6b518bfbe671" containerName="nova-kuttl-cell1-compute-fake1-compute-compute" Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.851557 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.853914 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-compute-fake1-compute-config-data" Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.859023 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0"] Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.990391 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc789\" (UniqueName: \"kubernetes.io/projected/d76b20ef-bbfc-4399-86bc-7a69eeafa479-kube-api-access-fc789\") pod \"nova-kuttl-cell1-compute-fake1-compute-0\" (UID: \"d76b20ef-bbfc-4399-86bc-7a69eeafa479\") " pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:52:18 crc kubenswrapper[4711]: I0123 08:52:18.990466 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d76b20ef-bbfc-4399-86bc-7a69eeafa479-config-data\") pod \"nova-kuttl-cell1-compute-fake1-compute-0\" (UID: \"d76b20ef-bbfc-4399-86bc-7a69eeafa479\") " pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:52:19 crc kubenswrapper[4711]: I0123 08:52:19.092244 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc789\" (UniqueName: \"kubernetes.io/projected/d76b20ef-bbfc-4399-86bc-7a69eeafa479-kube-api-access-fc789\") pod \"nova-kuttl-cell1-compute-fake1-compute-0\" (UID: \"d76b20ef-bbfc-4399-86bc-7a69eeafa479\") " pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:52:19 crc kubenswrapper[4711]: I0123 08:52:19.092315 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d76b20ef-bbfc-4399-86bc-7a69eeafa479-config-data\") pod \"nova-kuttl-cell1-compute-fake1-compute-0\" (UID: \"d76b20ef-bbfc-4399-86bc-7a69eeafa479\") " pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:52:19 crc kubenswrapper[4711]: I0123 08:52:19.097313 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d76b20ef-bbfc-4399-86bc-7a69eeafa479-config-data\") pod \"nova-kuttl-cell1-compute-fake1-compute-0\" (UID: \"d76b20ef-bbfc-4399-86bc-7a69eeafa479\") " pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:52:19 crc kubenswrapper[4711]: I0123 08:52:19.108327 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc789\" (UniqueName: \"kubernetes.io/projected/d76b20ef-bbfc-4399-86bc-7a69eeafa479-kube-api-access-fc789\") pod \"nova-kuttl-cell1-compute-fake1-compute-0\" (UID: \"d76b20ef-bbfc-4399-86bc-7a69eeafa479\") " pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:52:19 crc kubenswrapper[4711]: I0123 08:52:19.176717 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:52:19 crc kubenswrapper[4711]: I0123 08:52:19.483056 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d73adce3-6489-46cd-baca-6b518bfbe671" path="/var/lib/kubelet/pods/d73adce3-6489-46cd-baca-6b518bfbe671/volumes" Jan 23 08:52:19 crc kubenswrapper[4711]: I0123 08:52:19.605720 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0"] Jan 23 08:52:19 crc kubenswrapper[4711]: I0123 08:52:19.795155 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" event={"ID":"d76b20ef-bbfc-4399-86bc-7a69eeafa479","Type":"ContainerStarted","Data":"f3881c6c2617bffed63a8054e1ffb6dd2755369f85c7c2880982eeb393f1fe3a"} Jan 23 08:52:19 crc kubenswrapper[4711]: I0123 08:52:19.795210 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" event={"ID":"d76b20ef-bbfc-4399-86bc-7a69eeafa479","Type":"ContainerStarted","Data":"91183da4b4cf87cb06303d0a2de3022dfe286118a23e160658b3f932d71c39c4"} Jan 23 08:52:19 crc kubenswrapper[4711]: I0123 08:52:19.796756 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:52:19 crc kubenswrapper[4711]: I0123 08:52:19.815890 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" podStartSLOduration=1.815864331 podStartE2EDuration="1.815864331s" podCreationTimestamp="2026-01-23 08:52:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:52:19.81052647 +0000 UTC m=+1925.383482848" watchObservedRunningTime="2026-01-23 08:52:19.815864331 +0000 UTC m=+1925.388820699" Jan 23 08:52:19 crc kubenswrapper[4711]: I0123 08:52:19.835114 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" podUID="d76b20ef-bbfc-4399-86bc-7a69eeafa479" containerName="nova-kuttl-cell1-compute-fake1-compute-compute" probeResult="failure" output="" Jan 23 08:52:20 crc kubenswrapper[4711]: I0123 08:52:20.834139 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:52:21 crc kubenswrapper[4711]: I0123 08:52:21.175351 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:52:22 crc kubenswrapper[4711]: I0123 08:52:22.203969 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:52:23 crc kubenswrapper[4711]: I0123 08:52:23.833234 4711 generic.go:334] "Generic (PLEG): container finished" podID="d76b20ef-bbfc-4399-86bc-7a69eeafa479" containerID="f3881c6c2617bffed63a8054e1ffb6dd2755369f85c7c2880982eeb393f1fe3a" exitCode=0 Jan 23 08:52:23 crc kubenswrapper[4711]: I0123 08:52:23.833291 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" event={"ID":"d76b20ef-bbfc-4399-86bc-7a69eeafa479","Type":"ContainerDied","Data":"f3881c6c2617bffed63a8054e1ffb6dd2755369f85c7c2880982eeb393f1fe3a"} Jan 23 08:52:23 crc kubenswrapper[4711]: I0123 08:52:23.834067 4711 
scope.go:117] "RemoveContainer" containerID="f3881c6c2617bffed63a8054e1ffb6dd2755369f85c7c2880982eeb393f1fe3a" Jan 23 08:52:24 crc kubenswrapper[4711]: I0123 08:52:24.081847 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:24 crc kubenswrapper[4711]: I0123 08:52:24.082172 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:24 crc kubenswrapper[4711]: I0123 08:52:24.177761 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:52:24 crc kubenswrapper[4711]: I0123 08:52:24.844760 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" event={"ID":"d76b20ef-bbfc-4399-86bc-7a69eeafa479","Type":"ContainerStarted","Data":"87b0a1dfa6d0ea7a3c96fbf0c314ea8ce343511692fcf64c91dbc607517d0109"} Jan 23 08:52:24 crc kubenswrapper[4711]: I0123 08:52:24.845204 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:52:24 crc kubenswrapper[4711]: I0123 08:52:24.874706 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:52:25 crc kubenswrapper[4711]: I0123 08:52:25.164822 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="d194640a-7e19-4627-8671-ebd9bb1eeaa5" containerName="nova-kuttl-api-log" probeResult="failure" output="Get \"http://10.217.0.207:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:52:25 crc kubenswrapper[4711]: I0123 08:52:25.164999 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="d194640a-7e19-4627-8671-ebd9bb1eeaa5" containerName="nova-kuttl-api-api" probeResult="failure" output="Get \"http://10.217.0.207:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:52:26 crc kubenswrapper[4711]: I0123 08:52:26.175109 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:52:26 crc kubenswrapper[4711]: I0123 08:52:26.200689 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:52:26 crc kubenswrapper[4711]: I0123 08:52:26.882535 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:52:27 crc kubenswrapper[4711]: I0123 08:52:27.474360 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:52:27 crc kubenswrapper[4711]: E0123 08:52:27.474912 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:52:27 crc kubenswrapper[4711]: I0123 08:52:27.872122 4711 generic.go:334] "Generic (PLEG): container finished" 
podID="d76b20ef-bbfc-4399-86bc-7a69eeafa479" containerID="87b0a1dfa6d0ea7a3c96fbf0c314ea8ce343511692fcf64c91dbc607517d0109" exitCode=0 Jan 23 08:52:27 crc kubenswrapper[4711]: I0123 08:52:27.873413 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" event={"ID":"d76b20ef-bbfc-4399-86bc-7a69eeafa479","Type":"ContainerDied","Data":"87b0a1dfa6d0ea7a3c96fbf0c314ea8ce343511692fcf64c91dbc607517d0109"} Jan 23 08:52:27 crc kubenswrapper[4711]: I0123 08:52:27.873463 4711 scope.go:117] "RemoveContainer" containerID="f3881c6c2617bffed63a8054e1ffb6dd2755369f85c7c2880982eeb393f1fe3a" Jan 23 08:52:27 crc kubenswrapper[4711]: I0123 08:52:27.874052 4711 scope.go:117] "RemoveContainer" containerID="87b0a1dfa6d0ea7a3c96fbf0c314ea8ce343511692fcf64c91dbc607517d0109" Jan 23 08:52:27 crc kubenswrapper[4711]: E0123 08:52:27.874452 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-kuttl-cell1-compute-fake1-compute-compute\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-kuttl-cell1-compute-fake1-compute-compute pod=nova-kuttl-cell1-compute-fake1-compute-0_nova-kuttl-default(d76b20ef-bbfc-4399-86bc-7a69eeafa479)\"" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" podUID="d76b20ef-bbfc-4399-86bc-7a69eeafa479" Jan 23 08:52:29 crc kubenswrapper[4711]: I0123 08:52:29.176839 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:52:29 crc kubenswrapper[4711]: I0123 08:52:29.177492 4711 scope.go:117] "RemoveContainer" containerID="87b0a1dfa6d0ea7a3c96fbf0c314ea8ce343511692fcf64c91dbc607517d0109" Jan 23 08:52:29 crc kubenswrapper[4711]: I0123 08:52:29.177671 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:52:29 crc kubenswrapper[4711]: E0123 08:52:29.177774 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-kuttl-cell1-compute-fake1-compute-compute\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-kuttl-cell1-compute-fake1-compute-compute pod=nova-kuttl-cell1-compute-fake1-compute-0_nova-kuttl-default(d76b20ef-bbfc-4399-86bc-7a69eeafa479)\"" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" podUID="d76b20ef-bbfc-4399-86bc-7a69eeafa479" Jan 23 08:52:29 crc kubenswrapper[4711]: I0123 08:52:29.892458 4711 scope.go:117] "RemoveContainer" containerID="87b0a1dfa6d0ea7a3c96fbf0c314ea8ce343511692fcf64c91dbc607517d0109" Jan 23 08:52:29 crc kubenswrapper[4711]: E0123 08:52:29.893220 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-kuttl-cell1-compute-fake1-compute-compute\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-kuttl-cell1-compute-fake1-compute-compute pod=nova-kuttl-cell1-compute-fake1-compute-0_nova-kuttl-default(d76b20ef-bbfc-4399-86bc-7a69eeafa479)\"" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" podUID="d76b20ef-bbfc-4399-86bc-7a69eeafa479" Jan 23 08:52:34 crc kubenswrapper[4711]: I0123 08:52:34.097830 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:34 crc kubenswrapper[4711]: I0123 08:52:34.098187 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:34 crc kubenswrapper[4711]: I0123 08:52:34.098490 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:34 crc kubenswrapper[4711]: I0123 08:52:34.098553 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:34 crc kubenswrapper[4711]: I0123 08:52:34.101845 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:34 crc kubenswrapper[4711]: I0123 08:52:34.104305 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:38 crc kubenswrapper[4711]: I0123 08:52:38.474630 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:52:38 crc kubenswrapper[4711]: E0123 08:52:38.475117 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:52:43 crc kubenswrapper[4711]: I0123 08:52:43.473323 4711 scope.go:117] "RemoveContainer" containerID="87b0a1dfa6d0ea7a3c96fbf0c314ea8ce343511692fcf64c91dbc607517d0109" Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.012192 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" event={"ID":"d76b20ef-bbfc-4399-86bc-7a69eeafa479","Type":"ContainerStarted","Data":"32507dac522cc1cee510b7d3f46eae113739378b0c8ad7d725ea6ff76140718c"} Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.012788 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.039827 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.475809 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.495575 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.507580 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-host-discover-dw6xz"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.523736 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.544771 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-mapping-pstd6"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.550565 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-cell-mapping-ggc75"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.558548 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.565423 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/novacell1a2a4-account-delete-4x9qk"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.566677 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novacell1a2a4-account-delete-4x9qk" Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.582646 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/novacell1a2a4-account-delete-4x9qk"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.622961 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.623244 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podUID="d09491b5-bdb3-459c-a33b-0eae7f2b6f9c" containerName="nova-kuttl-scheduler-scheduler" containerID="cri-o://1242a681f3dbcaf5350cde7b252dddd25360bfa31b2cd77fc07a6680a337d459" gracePeriod=30 Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.642622 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/novacell0c1fc-account-delete-lvmdq"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.644807 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novacell0c1fc-account-delete-lvmdq" Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.647970 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/novacell0c1fc-account-delete-lvmdq"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.745838 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.754410 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.754724 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" podUID="03a26174-c17e-4e64-a30d-e1ddccee512c" containerName="nova-kuttl-cell0-conductor-conductor" containerID="cri-o://b03772d4f000db00a31fd4cd0d74fd26c7b67ab6ac5eecd32b0b2e528892b58d" gracePeriod=30 Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.770421 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.770864 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" podUID="3ecb36fd-83f9-430e-b61f-1a27d907d613" containerName="nova-kuttl-cell1-novncproxy-novncproxy" containerID="cri-o://eb42132e37a984656fedb31f5e43b92c179a833aa5115b59474c23bcfbeb3ce4" gracePeriod=30 Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.771848 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tz8l\" (UniqueName: \"kubernetes.io/projected/25fd533a-6c53-4c39-8dff-3d3511f89d5d-kube-api-access-9tz8l\") pod \"novacell0c1fc-account-delete-lvmdq\" (UID: \"25fd533a-6c53-4c39-8dff-3d3511f89d5d\") " pod="nova-kuttl-default/novacell0c1fc-account-delete-lvmdq" Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.773256 
4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25fd533a-6c53-4c39-8dff-3d3511f89d5d-operator-scripts\") pod \"novacell0c1fc-account-delete-lvmdq\" (UID: \"25fd533a-6c53-4c39-8dff-3d3511f89d5d\") " pod="nova-kuttl-default/novacell0c1fc-account-delete-lvmdq" Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.773425 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmct5\" (UniqueName: \"kubernetes.io/projected/0bf3a05e-119e-459b-b2af-0a2dece0ba15-kube-api-access-cmct5\") pod \"novacell1a2a4-account-delete-4x9qk\" (UID: \"0bf3a05e-119e-459b-b2af-0a2dece0ba15\") " pod="nova-kuttl-default/novacell1a2a4-account-delete-4x9qk" Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.773636 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0bf3a05e-119e-459b-b2af-0a2dece0ba15-operator-scripts\") pod \"novacell1a2a4-account-delete-4x9qk\" (UID: \"0bf3a05e-119e-459b-b2af-0a2dece0ba15\") " pod="nova-kuttl-default/novacell1a2a4-account-delete-4x9qk" Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.789442 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-jcd9c"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.810724 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.811031 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="d194640a-7e19-4627-8671-ebd9bb1eeaa5" containerName="nova-kuttl-api-log" containerID="cri-o://074d94d05775b76c50f474482b56a94bf8794148b41b844ae9116888639e7815" gracePeriod=30 Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.811137 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="d194640a-7e19-4627-8671-ebd9bb1eeaa5" containerName="nova-kuttl-api-api" containerID="cri-o://aff96ca15a3679a9408c5c8fe00137e0a3440055a2e619dc8a346f5c1bb8398d" gracePeriod=30 Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.826536 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/novaapi1145-account-delete-kh8nx"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.827630 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/novaapi1145-account-delete-kh8nx" Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.847635 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/novaapi1145-account-delete-kh8nx"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.867577 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.867873 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="35f8100a-5b47-4709-8005-06fd54d0da7a" containerName="nova-kuttl-metadata-log" containerID="cri-o://f98504a33d29c5a17d3bc5297b768bd8ce8df9a52756e317762477cec225dc07" gracePeriod=30 Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.868378 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="35f8100a-5b47-4709-8005-06fd54d0da7a" containerName="nova-kuttl-metadata-metadata" containerID="cri-o://12bd411285265e07e16996600d0d81e4f293eb29274b04e702f2e3b54e271818" gracePeriod=30 Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.879421 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tz8l\" (UniqueName: \"kubernetes.io/projected/25fd533a-6c53-4c39-8dff-3d3511f89d5d-kube-api-access-9tz8l\") pod \"novacell0c1fc-account-delete-lvmdq\" (UID: \"25fd533a-6c53-4c39-8dff-3d3511f89d5d\") " pod="nova-kuttl-default/novacell0c1fc-account-delete-lvmdq" Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.879505 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25fd533a-6c53-4c39-8dff-3d3511f89d5d-operator-scripts\") pod \"novacell0c1fc-account-delete-lvmdq\" (UID: \"25fd533a-6c53-4c39-8dff-3d3511f89d5d\") " pod="nova-kuttl-default/novacell0c1fc-account-delete-lvmdq" Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.879706 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmct5\" (UniqueName: \"kubernetes.io/projected/0bf3a05e-119e-459b-b2af-0a2dece0ba15-kube-api-access-cmct5\") pod \"novacell1a2a4-account-delete-4x9qk\" (UID: \"0bf3a05e-119e-459b-b2af-0a2dece0ba15\") " pod="nova-kuttl-default/novacell1a2a4-account-delete-4x9qk" Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.879756 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0bf3a05e-119e-459b-b2af-0a2dece0ba15-operator-scripts\") pod \"novacell1a2a4-account-delete-4x9qk\" (UID: \"0bf3a05e-119e-459b-b2af-0a2dece0ba15\") " pod="nova-kuttl-default/novacell1a2a4-account-delete-4x9qk" Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.880587 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0bf3a05e-119e-459b-b2af-0a2dece0ba15-operator-scripts\") pod \"novacell1a2a4-account-delete-4x9qk\" (UID: \"0bf3a05e-119e-459b-b2af-0a2dece0ba15\") " pod="nova-kuttl-default/novacell1a2a4-account-delete-4x9qk" Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.881491 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25fd533a-6c53-4c39-8dff-3d3511f89d5d-operator-scripts\") pod \"novacell0c1fc-account-delete-lvmdq\" 
(UID: \"25fd533a-6c53-4c39-8dff-3d3511f89d5d\") " pod="nova-kuttl-default/novacell0c1fc-account-delete-lvmdq" Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.890470 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.901940 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.902171 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" podUID="ed0b0d47-c436-47b1-ad07-719de0986cb0" containerName="nova-kuttl-cell1-conductor-conductor" containerID="cri-o://7c16e17c62978f75ae1e7474d4b1139142bff92e2f56ee17cb8e14dd0d169eb1" gracePeriod=30 Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.910036 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tz8l\" (UniqueName: \"kubernetes.io/projected/25fd533a-6c53-4c39-8dff-3d3511f89d5d-kube-api-access-9tz8l\") pod \"novacell0c1fc-account-delete-lvmdq\" (UID: \"25fd533a-6c53-4c39-8dff-3d3511f89d5d\") " pod="nova-kuttl-default/novacell0c1fc-account-delete-lvmdq" Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.911675 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-lg52n"] Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.931123 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmct5\" (UniqueName: \"kubernetes.io/projected/0bf3a05e-119e-459b-b2af-0a2dece0ba15-kube-api-access-cmct5\") pod \"novacell1a2a4-account-delete-4x9qk\" (UID: \"0bf3a05e-119e-459b-b2af-0a2dece0ba15\") " pod="nova-kuttl-default/novacell1a2a4-account-delete-4x9qk" Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.983583 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87qpc\" (UniqueName: \"kubernetes.io/projected/10576897-3cea-4c76-b889-ce7edb150d32-kube-api-access-87qpc\") pod \"novaapi1145-account-delete-kh8nx\" (UID: \"10576897-3cea-4c76-b889-ce7edb150d32\") " pod="nova-kuttl-default/novaapi1145-account-delete-kh8nx" Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.983697 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10576897-3cea-4c76-b889-ce7edb150d32-operator-scripts\") pod \"novaapi1145-account-delete-kh8nx\" (UID: \"10576897-3cea-4c76-b889-ce7edb150d32\") " pod="nova-kuttl-default/novaapi1145-account-delete-kh8nx" Jan 23 08:52:44 crc kubenswrapper[4711]: I0123 08:52:44.991597 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/novacell0c1fc-account-delete-lvmdq" Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.032534 4711 generic.go:334] "Generic (PLEG): container finished" podID="d194640a-7e19-4627-8671-ebd9bb1eeaa5" containerID="074d94d05775b76c50f474482b56a94bf8794148b41b844ae9116888639e7815" exitCode=143 Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.032652 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"d194640a-7e19-4627-8671-ebd9bb1eeaa5","Type":"ContainerDied","Data":"074d94d05775b76c50f474482b56a94bf8794148b41b844ae9116888639e7815"} Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.041942 4711 generic.go:334] "Generic (PLEG): container finished" podID="35f8100a-5b47-4709-8005-06fd54d0da7a" containerID="f98504a33d29c5a17d3bc5297b768bd8ce8df9a52756e317762477cec225dc07" exitCode=143 Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.042121 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"35f8100a-5b47-4709-8005-06fd54d0da7a","Type":"ContainerDied","Data":"f98504a33d29c5a17d3bc5297b768bd8ce8df9a52756e317762477cec225dc07"} Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.042428 4711 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" secret="" err="secret \"nova-nova-kuttl-dockercfg-nksnh\" not found" Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.086436 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87qpc\" (UniqueName: \"kubernetes.io/projected/10576897-3cea-4c76-b889-ce7edb150d32-kube-api-access-87qpc\") pod \"novaapi1145-account-delete-kh8nx\" (UID: \"10576897-3cea-4c76-b889-ce7edb150d32\") " pod="nova-kuttl-default/novaapi1145-account-delete-kh8nx" Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.086830 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10576897-3cea-4c76-b889-ce7edb150d32-operator-scripts\") pod \"novaapi1145-account-delete-kh8nx\" (UID: \"10576897-3cea-4c76-b889-ce7edb150d32\") " pod="nova-kuttl-default/novaapi1145-account-delete-kh8nx" Jan 23 08:52:45 crc kubenswrapper[4711]: E0123 08:52:45.086949 4711 secret.go:188] Couldn't get secret nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-config-data: secret "nova-kuttl-cell1-compute-fake1-compute-config-data" not found Jan 23 08:52:45 crc kubenswrapper[4711]: E0123 08:52:45.087016 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d76b20ef-bbfc-4399-86bc-7a69eeafa479-config-data podName:d76b20ef-bbfc-4399-86bc-7a69eeafa479 nodeName:}" failed. No retries permitted until 2026-01-23 08:52:45.586995934 +0000 UTC m=+1951.159952302 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/d76b20ef-bbfc-4399-86bc-7a69eeafa479-config-data") pod "nova-kuttl-cell1-compute-fake1-compute-0" (UID: "d76b20ef-bbfc-4399-86bc-7a69eeafa479") : secret "nova-kuttl-cell1-compute-fake1-compute-config-data" not found Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.087800 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10576897-3cea-4c76-b889-ce7edb150d32-operator-scripts\") pod \"novaapi1145-account-delete-kh8nx\" (UID: \"10576897-3cea-4c76-b889-ce7edb150d32\") " pod="nova-kuttl-default/novaapi1145-account-delete-kh8nx" Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.112546 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87qpc\" (UniqueName: \"kubernetes.io/projected/10576897-3cea-4c76-b889-ce7edb150d32-kube-api-access-87qpc\") pod \"novaapi1145-account-delete-kh8nx\" (UID: \"10576897-3cea-4c76-b889-ce7edb150d32\") " pod="nova-kuttl-default/novaapi1145-account-delete-kh8nx" Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.163147 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novaapi1145-account-delete-kh8nx" Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.190622 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novacell1a2a4-account-delete-4x9qk" Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.469313 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/novacell0c1fc-account-delete-lvmdq"] Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.495317 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2916172d-3d37-431e-9311-1846770d5c02" path="/var/lib/kubelet/pods/2916172d-3d37-431e-9311-1846770d5c02/volumes" Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.496061 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34a63f0b-9154-4762-b7af-d018f5a7d69a" path="/var/lib/kubelet/pods/34a63f0b-9154-4762-b7af-d018f5a7d69a/volumes" Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.496588 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a43e07a-6038-4e84-8fb0-3163c706ebfd" path="/var/lib/kubelet/pods/4a43e07a-6038-4e84-8fb0-3163c706ebfd/volumes" Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.497980 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="516a75f8-e3ec-4723-ab0e-e0f2656077e8" path="/var/lib/kubelet/pods/516a75f8-e3ec-4723-ab0e-e0f2656077e8/volumes" Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.498612 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68a17610-bf27-4760-a41f-f575d1de98ad" path="/var/lib/kubelet/pods/68a17610-bf27-4760-a41f-f575d1de98ad/volumes" Jan 23 08:52:45 crc kubenswrapper[4711]: E0123 08:52:45.597475 4711 secret.go:188] Couldn't get secret nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-config-data: secret "nova-kuttl-cell1-compute-fake1-compute-config-data" not found Jan 23 08:52:45 crc kubenswrapper[4711]: E0123 08:52:45.597888 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d76b20ef-bbfc-4399-86bc-7a69eeafa479-config-data podName:d76b20ef-bbfc-4399-86bc-7a69eeafa479 nodeName:}" failed. 
No retries permitted until 2026-01-23 08:52:46.597863533 +0000 UTC m=+1952.170819901 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/d76b20ef-bbfc-4399-86bc-7a69eeafa479-config-data") pod "nova-kuttl-cell1-compute-fake1-compute-0" (UID: "d76b20ef-bbfc-4399-86bc-7a69eeafa479") : secret "nova-kuttl-cell1-compute-fake1-compute-config-data" not found Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.623376 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/novaapi1145-account-delete-kh8nx"] Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.701461 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.713323 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/novacell1a2a4-account-delete-4x9qk"] Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.800885 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lgf8x\" (UniqueName: \"kubernetes.io/projected/3ecb36fd-83f9-430e-b61f-1a27d907d613-kube-api-access-lgf8x\") pod \"3ecb36fd-83f9-430e-b61f-1a27d907d613\" (UID: \"3ecb36fd-83f9-430e-b61f-1a27d907d613\") " Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.801036 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ecb36fd-83f9-430e-b61f-1a27d907d613-config-data\") pod \"3ecb36fd-83f9-430e-b61f-1a27d907d613\" (UID: \"3ecb36fd-83f9-430e-b61f-1a27d907d613\") " Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.806631 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ecb36fd-83f9-430e-b61f-1a27d907d613-kube-api-access-lgf8x" (OuterVolumeSpecName: "kube-api-access-lgf8x") pod "3ecb36fd-83f9-430e-b61f-1a27d907d613" (UID: "3ecb36fd-83f9-430e-b61f-1a27d907d613"). InnerVolumeSpecName "kube-api-access-lgf8x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.827390 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ecb36fd-83f9-430e-b61f-1a27d907d613-config-data" (OuterVolumeSpecName: "config-data") pod "3ecb36fd-83f9-430e-b61f-1a27d907d613" (UID: "3ecb36fd-83f9-430e-b61f-1a27d907d613"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.902836 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lgf8x\" (UniqueName: \"kubernetes.io/projected/3ecb36fd-83f9-430e-b61f-1a27d907d613-kube-api-access-lgf8x\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:45 crc kubenswrapper[4711]: I0123 08:52:45.902874 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ecb36fd-83f9-430e-b61f-1a27d907d613-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:46 crc kubenswrapper[4711]: I0123 08:52:46.053336 4711 generic.go:334] "Generic (PLEG): container finished" podID="10576897-3cea-4c76-b889-ce7edb150d32" containerID="40e825ea9a6e96598613640ec00e4d72940c4231e278b40662a68b219879df50" exitCode=0 Jan 23 08:52:46 crc kubenswrapper[4711]: I0123 08:52:46.053484 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novaapi1145-account-delete-kh8nx" event={"ID":"10576897-3cea-4c76-b889-ce7edb150d32","Type":"ContainerDied","Data":"40e825ea9a6e96598613640ec00e4d72940c4231e278b40662a68b219879df50"} Jan 23 08:52:46 crc kubenswrapper[4711]: I0123 08:52:46.053688 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novaapi1145-account-delete-kh8nx" event={"ID":"10576897-3cea-4c76-b889-ce7edb150d32","Type":"ContainerStarted","Data":"57b7e254af0616f74cba9ba1511720fdc864b0bc7bfa374718a2c2b436ca7336"} Jan 23 08:52:46 crc kubenswrapper[4711]: I0123 08:52:46.055184 4711 generic.go:334] "Generic (PLEG): container finished" podID="25fd533a-6c53-4c39-8dff-3d3511f89d5d" containerID="86ab0618d146f5612d3cf08cb93f536e357bf5978c2133b564e58af5cdc2f575" exitCode=0 Jan 23 08:52:46 crc kubenswrapper[4711]: I0123 08:52:46.055293 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novacell0c1fc-account-delete-lvmdq" event={"ID":"25fd533a-6c53-4c39-8dff-3d3511f89d5d","Type":"ContainerDied","Data":"86ab0618d146f5612d3cf08cb93f536e357bf5978c2133b564e58af5cdc2f575"} Jan 23 08:52:46 crc kubenswrapper[4711]: I0123 08:52:46.055313 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novacell0c1fc-account-delete-lvmdq" event={"ID":"25fd533a-6c53-4c39-8dff-3d3511f89d5d","Type":"ContainerStarted","Data":"fcec6159cc64fe2b7b90404a26ef9d50509050d82526038bc91ffedc5b2a7de2"} Jan 23 08:52:46 crc kubenswrapper[4711]: I0123 08:52:46.056541 4711 generic.go:334] "Generic (PLEG): container finished" podID="0bf3a05e-119e-459b-b2af-0a2dece0ba15" containerID="5403d5d5a2e294ac2483a6bf2592295e3dd72bde46af8f490c41be40e8ac299a" exitCode=0 Jan 23 08:52:46 crc kubenswrapper[4711]: I0123 08:52:46.056612 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novacell1a2a4-account-delete-4x9qk" event={"ID":"0bf3a05e-119e-459b-b2af-0a2dece0ba15","Type":"ContainerDied","Data":"5403d5d5a2e294ac2483a6bf2592295e3dd72bde46af8f490c41be40e8ac299a"} Jan 23 08:52:46 crc kubenswrapper[4711]: I0123 08:52:46.056646 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novacell1a2a4-account-delete-4x9qk" event={"ID":"0bf3a05e-119e-459b-b2af-0a2dece0ba15","Type":"ContainerStarted","Data":"c2595b6f9cb1fddfde1ac04b64afbb570d3db09e5cbbb956619c9aecdf9e6330"} Jan 23 08:52:46 crc kubenswrapper[4711]: I0123 08:52:46.058390 4711 generic.go:334] "Generic (PLEG): container finished" podID="3ecb36fd-83f9-430e-b61f-1a27d907d613" 
containerID="eb42132e37a984656fedb31f5e43b92c179a833aa5115b59474c23bcfbeb3ce4" exitCode=0 Jan 23 08:52:46 crc kubenswrapper[4711]: I0123 08:52:46.058433 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:52:46 crc kubenswrapper[4711]: I0123 08:52:46.058465 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" event={"ID":"3ecb36fd-83f9-430e-b61f-1a27d907d613","Type":"ContainerDied","Data":"eb42132e37a984656fedb31f5e43b92c179a833aa5115b59474c23bcfbeb3ce4"} Jan 23 08:52:46 crc kubenswrapper[4711]: I0123 08:52:46.058520 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" event={"ID":"3ecb36fd-83f9-430e-b61f-1a27d907d613","Type":"ContainerDied","Data":"0d9c25cb21fa3032b5b405a0aa8455575b34f14a50acff25ab92cecebd737ec7"} Jan 23 08:52:46 crc kubenswrapper[4711]: I0123 08:52:46.058543 4711 scope.go:117] "RemoveContainer" containerID="eb42132e37a984656fedb31f5e43b92c179a833aa5115b59474c23bcfbeb3ce4" Jan 23 08:52:46 crc kubenswrapper[4711]: I0123 08:52:46.058882 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" podUID="d76b20ef-bbfc-4399-86bc-7a69eeafa479" containerName="nova-kuttl-cell1-compute-fake1-compute-compute" containerID="cri-o://32507dac522cc1cee510b7d3f46eae113739378b0c8ad7d725ea6ff76140718c" gracePeriod=30 Jan 23 08:52:46 crc kubenswrapper[4711]: E0123 08:52:46.080526 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7c16e17c62978f75ae1e7474d4b1139142bff92e2f56ee17cb8e14dd0d169eb1" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:52:46 crc kubenswrapper[4711]: E0123 08:52:46.081776 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7c16e17c62978f75ae1e7474d4b1139142bff92e2f56ee17cb8e14dd0d169eb1" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:52:46 crc kubenswrapper[4711]: E0123 08:52:46.083607 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7c16e17c62978f75ae1e7474d4b1139142bff92e2f56ee17cb8e14dd0d169eb1" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:52:46 crc kubenswrapper[4711]: E0123 08:52:46.084239 4711 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" podUID="ed0b0d47-c436-47b1-ad07-719de0986cb0" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:52:46 crc kubenswrapper[4711]: I0123 08:52:46.086021 4711 scope.go:117] "RemoveContainer" containerID="eb42132e37a984656fedb31f5e43b92c179a833aa5115b59474c23bcfbeb3ce4" Jan 23 08:52:46 crc kubenswrapper[4711]: E0123 08:52:46.087040 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb42132e37a984656fedb31f5e43b92c179a833aa5115b59474c23bcfbeb3ce4\": 
container with ID starting with eb42132e37a984656fedb31f5e43b92c179a833aa5115b59474c23bcfbeb3ce4 not found: ID does not exist" containerID="eb42132e37a984656fedb31f5e43b92c179a833aa5115b59474c23bcfbeb3ce4" Jan 23 08:52:46 crc kubenswrapper[4711]: I0123 08:52:46.087079 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb42132e37a984656fedb31f5e43b92c179a833aa5115b59474c23bcfbeb3ce4"} err="failed to get container status \"eb42132e37a984656fedb31f5e43b92c179a833aa5115b59474c23bcfbeb3ce4\": rpc error: code = NotFound desc = could not find container \"eb42132e37a984656fedb31f5e43b92c179a833aa5115b59474c23bcfbeb3ce4\": container with ID starting with eb42132e37a984656fedb31f5e43b92c179a833aa5115b59474c23bcfbeb3ce4 not found: ID does not exist" Jan 23 08:52:46 crc kubenswrapper[4711]: I0123 08:52:46.126176 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"] Jan 23 08:52:46 crc kubenswrapper[4711]: I0123 08:52:46.135141 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"] Jan 23 08:52:46 crc kubenswrapper[4711]: E0123 08:52:46.177165 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1242a681f3dbcaf5350cde7b252dddd25360bfa31b2cd77fc07a6680a337d459" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:52:46 crc kubenswrapper[4711]: E0123 08:52:46.178490 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1242a681f3dbcaf5350cde7b252dddd25360bfa31b2cd77fc07a6680a337d459" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:52:46 crc kubenswrapper[4711]: E0123 08:52:46.179663 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1242a681f3dbcaf5350cde7b252dddd25360bfa31b2cd77fc07a6680a337d459" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:52:46 crc kubenswrapper[4711]: E0123 08:52:46.179722 4711 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podUID="d09491b5-bdb3-459c-a33b-0eae7f2b6f9c" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:52:46 crc kubenswrapper[4711]: E0123 08:52:46.612384 4711 secret.go:188] Couldn't get secret nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-config-data: secret "nova-kuttl-cell1-compute-fake1-compute-config-data" not found Jan 23 08:52:46 crc kubenswrapper[4711]: E0123 08:52:46.612466 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d76b20ef-bbfc-4399-86bc-7a69eeafa479-config-data podName:d76b20ef-bbfc-4399-86bc-7a69eeafa479 nodeName:}" failed. No retries permitted until 2026-01-23 08:52:48.612451074 +0000 UTC m=+1954.185407522 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/d76b20ef-bbfc-4399-86bc-7a69eeafa479-config-data") pod "nova-kuttl-cell1-compute-fake1-compute-0" (UID: "d76b20ef-bbfc-4399-86bc-7a69eeafa479") : secret "nova-kuttl-cell1-compute-fake1-compute-config-data" not found Jan 23 08:52:47 crc kubenswrapper[4711]: E0123 08:52:47.179028 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b03772d4f000db00a31fd4cd0d74fd26c7b67ab6ac5eecd32b0b2e528892b58d" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:52:47 crc kubenswrapper[4711]: E0123 08:52:47.180424 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b03772d4f000db00a31fd4cd0d74fd26c7b67ab6ac5eecd32b0b2e528892b58d" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:52:47 crc kubenswrapper[4711]: E0123 08:52:47.181448 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b03772d4f000db00a31fd4cd0d74fd26c7b67ab6ac5eecd32b0b2e528892b58d" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 23 08:52:47 crc kubenswrapper[4711]: E0123 08:52:47.181492 4711 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" podUID="03a26174-c17e-4e64-a30d-e1ddccee512c" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.485092 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ecb36fd-83f9-430e-b61f-1a27d907d613" path="/var/lib/kubelet/pods/3ecb36fd-83f9-430e-b61f-1a27d907d613/volumes" Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.497477 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novacell1a2a4-account-delete-4x9qk" Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.509347 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novacell0c1fc-account-delete-lvmdq" Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.510685 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/novaapi1145-account-delete-kh8nx" Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.631074 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tz8l\" (UniqueName: \"kubernetes.io/projected/25fd533a-6c53-4c39-8dff-3d3511f89d5d-kube-api-access-9tz8l\") pod \"25fd533a-6c53-4c39-8dff-3d3511f89d5d\" (UID: \"25fd533a-6c53-4c39-8dff-3d3511f89d5d\") " Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.631226 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmct5\" (UniqueName: \"kubernetes.io/projected/0bf3a05e-119e-459b-b2af-0a2dece0ba15-kube-api-access-cmct5\") pod \"0bf3a05e-119e-459b-b2af-0a2dece0ba15\" (UID: \"0bf3a05e-119e-459b-b2af-0a2dece0ba15\") " Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.631252 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0bf3a05e-119e-459b-b2af-0a2dece0ba15-operator-scripts\") pod \"0bf3a05e-119e-459b-b2af-0a2dece0ba15\" (UID: \"0bf3a05e-119e-459b-b2af-0a2dece0ba15\") " Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.631289 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87qpc\" (UniqueName: \"kubernetes.io/projected/10576897-3cea-4c76-b889-ce7edb150d32-kube-api-access-87qpc\") pod \"10576897-3cea-4c76-b889-ce7edb150d32\" (UID: \"10576897-3cea-4c76-b889-ce7edb150d32\") " Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.631360 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10576897-3cea-4c76-b889-ce7edb150d32-operator-scripts\") pod \"10576897-3cea-4c76-b889-ce7edb150d32\" (UID: \"10576897-3cea-4c76-b889-ce7edb150d32\") " Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.631412 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25fd533a-6c53-4c39-8dff-3d3511f89d5d-operator-scripts\") pod \"25fd533a-6c53-4c39-8dff-3d3511f89d5d\" (UID: \"25fd533a-6c53-4c39-8dff-3d3511f89d5d\") " Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.632099 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10576897-3cea-4c76-b889-ce7edb150d32-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "10576897-3cea-4c76-b889-ce7edb150d32" (UID: "10576897-3cea-4c76-b889-ce7edb150d32"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.632485 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bf3a05e-119e-459b-b2af-0a2dece0ba15-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0bf3a05e-119e-459b-b2af-0a2dece0ba15" (UID: "0bf3a05e-119e-459b-b2af-0a2dece0ba15"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.632695 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25fd533a-6c53-4c39-8dff-3d3511f89d5d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "25fd533a-6c53-4c39-8dff-3d3511f89d5d" (UID: "25fd533a-6c53-4c39-8dff-3d3511f89d5d"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.638408 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25fd533a-6c53-4c39-8dff-3d3511f89d5d-kube-api-access-9tz8l" (OuterVolumeSpecName: "kube-api-access-9tz8l") pod "25fd533a-6c53-4c39-8dff-3d3511f89d5d" (UID: "25fd533a-6c53-4c39-8dff-3d3511f89d5d"). InnerVolumeSpecName "kube-api-access-9tz8l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.638498 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bf3a05e-119e-459b-b2af-0a2dece0ba15-kube-api-access-cmct5" (OuterVolumeSpecName: "kube-api-access-cmct5") pod "0bf3a05e-119e-459b-b2af-0a2dece0ba15" (UID: "0bf3a05e-119e-459b-b2af-0a2dece0ba15"). InnerVolumeSpecName "kube-api-access-cmct5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.638576 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10576897-3cea-4c76-b889-ce7edb150d32-kube-api-access-87qpc" (OuterVolumeSpecName: "kube-api-access-87qpc") pod "10576897-3cea-4c76-b889-ce7edb150d32" (UID: "10576897-3cea-4c76-b889-ce7edb150d32"). InnerVolumeSpecName "kube-api-access-87qpc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.733478 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmct5\" (UniqueName: \"kubernetes.io/projected/0bf3a05e-119e-459b-b2af-0a2dece0ba15-kube-api-access-cmct5\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.733524 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0bf3a05e-119e-459b-b2af-0a2dece0ba15-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.733534 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87qpc\" (UniqueName: \"kubernetes.io/projected/10576897-3cea-4c76-b889-ce7edb150d32-kube-api-access-87qpc\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.733542 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10576897-3cea-4c76-b889-ce7edb150d32-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.733551 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25fd533a-6c53-4c39-8dff-3d3511f89d5d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:47 crc kubenswrapper[4711]: I0123 08:52:47.733559 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tz8l\" (UniqueName: \"kubernetes.io/projected/25fd533a-6c53-4c39-8dff-3d3511f89d5d-kube-api-access-9tz8l\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.002039 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="35f8100a-5b47-4709-8005-06fd54d0da7a" containerName="nova-kuttl-metadata-metadata" probeResult="failure" output="Get \"http://10.217.0.205:8775/\": read tcp 10.217.0.2:42346->10.217.0.205:8775: read: connection reset by peer" Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 
08:52:48.002064 4711 prober.go:107] "Probe failed" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="35f8100a-5b47-4709-8005-06fd54d0da7a" containerName="nova-kuttl-metadata-log" probeResult="failure" output="Get \"http://10.217.0.205:8775/\": read tcp 10.217.0.2:42352->10.217.0.205:8775: read: connection reset by peer" Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.076067 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novaapi1145-account-delete-kh8nx" event={"ID":"10576897-3cea-4c76-b889-ce7edb150d32","Type":"ContainerDied","Data":"57b7e254af0616f74cba9ba1511720fdc864b0bc7bfa374718a2c2b436ca7336"} Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.076104 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="57b7e254af0616f74cba9ba1511720fdc864b0bc7bfa374718a2c2b436ca7336" Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.076192 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novaapi1145-account-delete-kh8nx" Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.086220 4711 generic.go:334] "Generic (PLEG): container finished" podID="d194640a-7e19-4627-8671-ebd9bb1eeaa5" containerID="aff96ca15a3679a9408c5c8fe00137e0a3440055a2e619dc8a346f5c1bb8398d" exitCode=0 Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.086283 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"d194640a-7e19-4627-8671-ebd9bb1eeaa5","Type":"ContainerDied","Data":"aff96ca15a3679a9408c5c8fe00137e0a3440055a2e619dc8a346f5c1bb8398d"} Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.087430 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novacell0c1fc-account-delete-lvmdq" event={"ID":"25fd533a-6c53-4c39-8dff-3d3511f89d5d","Type":"ContainerDied","Data":"fcec6159cc64fe2b7b90404a26ef9d50509050d82526038bc91ffedc5b2a7de2"} Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.087475 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fcec6159cc64fe2b7b90404a26ef9d50509050d82526038bc91ffedc5b2a7de2" Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.087452 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/novacell0c1fc-account-delete-lvmdq" Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.088428 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/novacell1a2a4-account-delete-4x9qk" event={"ID":"0bf3a05e-119e-459b-b2af-0a2dece0ba15","Type":"ContainerDied","Data":"c2595b6f9cb1fddfde1ac04b64afbb570d3db09e5cbbb956619c9aecdf9e6330"} Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.088537 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c2595b6f9cb1fddfde1ac04b64afbb570d3db09e5cbbb956619c9aecdf9e6330" Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.088645 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/novacell1a2a4-account-delete-4x9qk" Jan 23 08:52:48 crc kubenswrapper[4711]: E0123 08:52:48.650873 4711 secret.go:188] Couldn't get secret nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-config-data: secret "nova-kuttl-cell1-compute-fake1-compute-config-data" not found Jan 23 08:52:48 crc kubenswrapper[4711]: E0123 08:52:48.650975 4711 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d76b20ef-bbfc-4399-86bc-7a69eeafa479-config-data podName:d76b20ef-bbfc-4399-86bc-7a69eeafa479 nodeName:}" failed. No retries permitted until 2026-01-23 08:52:52.650950497 +0000 UTC m=+1958.223906865 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/d76b20ef-bbfc-4399-86bc-7a69eeafa479-config-data") pod "nova-kuttl-cell1-compute-fake1-compute-0" (UID: "d76b20ef-bbfc-4399-86bc-7a69eeafa479") : secret "nova-kuttl-cell1-compute-fake1-compute-config-data" not found Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.661973 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.754172 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d194640a-7e19-4627-8671-ebd9bb1eeaa5-logs\") pod \"d194640a-7e19-4627-8671-ebd9bb1eeaa5\" (UID: \"d194640a-7e19-4627-8671-ebd9bb1eeaa5\") " Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.754315 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k8lpk\" (UniqueName: \"kubernetes.io/projected/d194640a-7e19-4627-8671-ebd9bb1eeaa5-kube-api-access-k8lpk\") pod \"d194640a-7e19-4627-8671-ebd9bb1eeaa5\" (UID: \"d194640a-7e19-4627-8671-ebd9bb1eeaa5\") " Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.754382 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d194640a-7e19-4627-8671-ebd9bb1eeaa5-config-data\") pod \"d194640a-7e19-4627-8671-ebd9bb1eeaa5\" (UID: \"d194640a-7e19-4627-8671-ebd9bb1eeaa5\") " Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.755263 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d194640a-7e19-4627-8671-ebd9bb1eeaa5-logs" (OuterVolumeSpecName: "logs") pod "d194640a-7e19-4627-8671-ebd9bb1eeaa5" (UID: "d194640a-7e19-4627-8671-ebd9bb1eeaa5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.765988 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d194640a-7e19-4627-8671-ebd9bb1eeaa5-kube-api-access-k8lpk" (OuterVolumeSpecName: "kube-api-access-k8lpk") pod "d194640a-7e19-4627-8671-ebd9bb1eeaa5" (UID: "d194640a-7e19-4627-8671-ebd9bb1eeaa5"). InnerVolumeSpecName "kube-api-access-k8lpk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.806907 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d194640a-7e19-4627-8671-ebd9bb1eeaa5-config-data" (OuterVolumeSpecName: "config-data") pod "d194640a-7e19-4627-8671-ebd9bb1eeaa5" (UID: "d194640a-7e19-4627-8671-ebd9bb1eeaa5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.856418 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d194640a-7e19-4627-8671-ebd9bb1eeaa5-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.856448 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k8lpk\" (UniqueName: \"kubernetes.io/projected/d194640a-7e19-4627-8671-ebd9bb1eeaa5-kube-api-access-k8lpk\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.856460 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d194640a-7e19-4627-8671-ebd9bb1eeaa5-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:48 crc kubenswrapper[4711]: I0123 08:52:48.996245 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.108445 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"d194640a-7e19-4627-8671-ebd9bb1eeaa5","Type":"ContainerDied","Data":"97ae47c32e5d284f6ed9294b9925526b55d8882853d45975aefd1a363a14a680"} Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.108470 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.108548 4711 scope.go:117] "RemoveContainer" containerID="aff96ca15a3679a9408c5c8fe00137e0a3440055a2e619dc8a346f5c1bb8398d" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.113192 4711 generic.go:334] "Generic (PLEG): container finished" podID="35f8100a-5b47-4709-8005-06fd54d0da7a" containerID="12bd411285265e07e16996600d0d81e4f293eb29274b04e702f2e3b54e271818" exitCode=0 Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.113255 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"35f8100a-5b47-4709-8005-06fd54d0da7a","Type":"ContainerDied","Data":"12bd411285265e07e16996600d0d81e4f293eb29274b04e702f2e3b54e271818"} Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.113296 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"35f8100a-5b47-4709-8005-06fd54d0da7a","Type":"ContainerDied","Data":"c5c56d03951f53ca34b201c6906a311454c347203c237e9f6b1f1aac93ba09cf"} Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.113234 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.118000 4711 generic.go:334] "Generic (PLEG): container finished" podID="d76b20ef-bbfc-4399-86bc-7a69eeafa479" containerID="32507dac522cc1cee510b7d3f46eae113739378b0c8ad7d725ea6ff76140718c" exitCode=1 Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.118064 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" event={"ID":"d76b20ef-bbfc-4399-86bc-7a69eeafa479","Type":"ContainerDied","Data":"32507dac522cc1cee510b7d3f46eae113739378b0c8ad7d725ea6ff76140718c"} Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.119842 4711 generic.go:334] "Generic (PLEG): container finished" podID="d09491b5-bdb3-459c-a33b-0eae7f2b6f9c" containerID="1242a681f3dbcaf5350cde7b252dddd25360bfa31b2cd77fc07a6680a337d459" exitCode=0 Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.119891 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"d09491b5-bdb3-459c-a33b-0eae7f2b6f9c","Type":"ContainerDied","Data":"1242a681f3dbcaf5350cde7b252dddd25360bfa31b2cd77fc07a6680a337d459"} Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.122357 4711 generic.go:334] "Generic (PLEG): container finished" podID="ed0b0d47-c436-47b1-ad07-719de0986cb0" containerID="7c16e17c62978f75ae1e7474d4b1139142bff92e2f56ee17cb8e14dd0d169eb1" exitCode=0 Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.122422 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" event={"ID":"ed0b0d47-c436-47b1-ad07-719de0986cb0","Type":"ContainerDied","Data":"7c16e17c62978f75ae1e7474d4b1139142bff92e2f56ee17cb8e14dd0d169eb1"} Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.146469 4711 scope.go:117] "RemoveContainer" containerID="074d94d05775b76c50f474482b56a94bf8794148b41b844ae9116888639e7815" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.157178 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.161470 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35f8100a-5b47-4709-8005-06fd54d0da7a-logs\") pod \"35f8100a-5b47-4709-8005-06fd54d0da7a\" (UID: \"35f8100a-5b47-4709-8005-06fd54d0da7a\") " Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.161522 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35f8100a-5b47-4709-8005-06fd54d0da7a-config-data\") pod \"35f8100a-5b47-4709-8005-06fd54d0da7a\" (UID: \"35f8100a-5b47-4709-8005-06fd54d0da7a\") " Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.161547 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lqjzb\" (UniqueName: \"kubernetes.io/projected/35f8100a-5b47-4709-8005-06fd54d0da7a-kube-api-access-lqjzb\") pod \"35f8100a-5b47-4709-8005-06fd54d0da7a\" (UID: \"35f8100a-5b47-4709-8005-06fd54d0da7a\") " Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.162196 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35f8100a-5b47-4709-8005-06fd54d0da7a-logs" (OuterVolumeSpecName: "logs") pod "35f8100a-5b47-4709-8005-06fd54d0da7a" (UID: "35f8100a-5b47-4709-8005-06fd54d0da7a"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.165576 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.166098 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35f8100a-5b47-4709-8005-06fd54d0da7a-kube-api-access-lqjzb" (OuterVolumeSpecName: "kube-api-access-lqjzb") pod "35f8100a-5b47-4709-8005-06fd54d0da7a" (UID: "35f8100a-5b47-4709-8005-06fd54d0da7a"). InnerVolumeSpecName "kube-api-access-lqjzb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.168709 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:52:49 crc kubenswrapper[4711]: E0123 08:52:49.177343 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 32507dac522cc1cee510b7d3f46eae113739378b0c8ad7d725ea6ff76140718c is running failed: container process not found" containerID="32507dac522cc1cee510b7d3f46eae113739378b0c8ad7d725ea6ff76140718c" cmd=["/usr/bin/pgrep","-r","DRST","nova-compute"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.177607 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:52:49 crc kubenswrapper[4711]: E0123 08:52:49.177749 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 32507dac522cc1cee510b7d3f46eae113739378b0c8ad7d725ea6ff76140718c is running failed: container process not found" containerID="32507dac522cc1cee510b7d3f46eae113739378b0c8ad7d725ea6ff76140718c" cmd=["/usr/bin/pgrep","-r","DRST","nova-compute"] Jan 23 08:52:49 crc kubenswrapper[4711]: E0123 08:52:49.178095 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 32507dac522cc1cee510b7d3f46eae113739378b0c8ad7d725ea6ff76140718c is running failed: container process not found" containerID="32507dac522cc1cee510b7d3f46eae113739378b0c8ad7d725ea6ff76140718c" cmd=["/usr/bin/pgrep","-r","DRST","nova-compute"] Jan 23 08:52:49 crc kubenswrapper[4711]: E0123 08:52:49.178198 4711 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 32507dac522cc1cee510b7d3f46eae113739378b0c8ad7d725ea6ff76140718c is running failed: container process not found" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" podUID="d76b20ef-bbfc-4399-86bc-7a69eeafa479" containerName="nova-kuttl-cell1-compute-fake1-compute-compute" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.182047 4711 scope.go:117] "RemoveContainer" containerID="12bd411285265e07e16996600d0d81e4f293eb29274b04e702f2e3b54e271818" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.188126 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.200853 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35f8100a-5b47-4709-8005-06fd54d0da7a-config-data" (OuterVolumeSpecName: "config-data") pod "35f8100a-5b47-4709-8005-06fd54d0da7a" (UID: "35f8100a-5b47-4709-8005-06fd54d0da7a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.218378 4711 scope.go:117] "RemoveContainer" containerID="f98504a33d29c5a17d3bc5297b768bd8ce8df9a52756e317762477cec225dc07" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.235387 4711 scope.go:117] "RemoveContainer" containerID="12bd411285265e07e16996600d0d81e4f293eb29274b04e702f2e3b54e271818" Jan 23 08:52:49 crc kubenswrapper[4711]: E0123 08:52:49.236000 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12bd411285265e07e16996600d0d81e4f293eb29274b04e702f2e3b54e271818\": container with ID starting with 12bd411285265e07e16996600d0d81e4f293eb29274b04e702f2e3b54e271818 not found: ID does not exist" containerID="12bd411285265e07e16996600d0d81e4f293eb29274b04e702f2e3b54e271818" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.236052 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12bd411285265e07e16996600d0d81e4f293eb29274b04e702f2e3b54e271818"} err="failed to get container status \"12bd411285265e07e16996600d0d81e4f293eb29274b04e702f2e3b54e271818\": rpc error: code = NotFound desc = could not find container \"12bd411285265e07e16996600d0d81e4f293eb29274b04e702f2e3b54e271818\": container with ID starting with 12bd411285265e07e16996600d0d81e4f293eb29274b04e702f2e3b54e271818 not found: ID does not exist" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.236080 4711 scope.go:117] "RemoveContainer" containerID="f98504a33d29c5a17d3bc5297b768bd8ce8df9a52756e317762477cec225dc07" Jan 23 08:52:49 crc kubenswrapper[4711]: E0123 08:52:49.236587 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f98504a33d29c5a17d3bc5297b768bd8ce8df9a52756e317762477cec225dc07\": container with ID starting with f98504a33d29c5a17d3bc5297b768bd8ce8df9a52756e317762477cec225dc07 not found: ID does not exist" containerID="f98504a33d29c5a17d3bc5297b768bd8ce8df9a52756e317762477cec225dc07" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.236637 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f98504a33d29c5a17d3bc5297b768bd8ce8df9a52756e317762477cec225dc07"} err="failed to get container status \"f98504a33d29c5a17d3bc5297b768bd8ce8df9a52756e317762477cec225dc07\": rpc error: code = NotFound desc = could not find container \"f98504a33d29c5a17d3bc5297b768bd8ce8df9a52756e317762477cec225dc07\": container with ID starting with f98504a33d29c5a17d3bc5297b768bd8ce8df9a52756e317762477cec225dc07 not found: ID does not exist" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.236669 4711 scope.go:117] "RemoveContainer" containerID="87b0a1dfa6d0ea7a3c96fbf0c314ea8ce343511692fcf64c91dbc607517d0109" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.262531 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/d09491b5-bdb3-459c-a33b-0eae7f2b6f9c-config-data\") pod \"d09491b5-bdb3-459c-a33b-0eae7f2b6f9c\" (UID: \"d09491b5-bdb3-459c-a33b-0eae7f2b6f9c\") " Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.262899 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed0b0d47-c436-47b1-ad07-719de0986cb0-config-data\") pod \"ed0b0d47-c436-47b1-ad07-719de0986cb0\" (UID: \"ed0b0d47-c436-47b1-ad07-719de0986cb0\") " Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.263013 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hc485\" (UniqueName: \"kubernetes.io/projected/d09491b5-bdb3-459c-a33b-0eae7f2b6f9c-kube-api-access-hc485\") pod \"d09491b5-bdb3-459c-a33b-0eae7f2b6f9c\" (UID: \"d09491b5-bdb3-459c-a33b-0eae7f2b6f9c\") " Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.263196 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gq7zg\" (UniqueName: \"kubernetes.io/projected/ed0b0d47-c436-47b1-ad07-719de0986cb0-kube-api-access-gq7zg\") pod \"ed0b0d47-c436-47b1-ad07-719de0986cb0\" (UID: \"ed0b0d47-c436-47b1-ad07-719de0986cb0\") " Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.263668 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35f8100a-5b47-4709-8005-06fd54d0da7a-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.263782 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35f8100a-5b47-4709-8005-06fd54d0da7a-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.263858 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lqjzb\" (UniqueName: \"kubernetes.io/projected/35f8100a-5b47-4709-8005-06fd54d0da7a-kube-api-access-lqjzb\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.269637 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed0b0d47-c436-47b1-ad07-719de0986cb0-kube-api-access-gq7zg" (OuterVolumeSpecName: "kube-api-access-gq7zg") pod "ed0b0d47-c436-47b1-ad07-719de0986cb0" (UID: "ed0b0d47-c436-47b1-ad07-719de0986cb0"). InnerVolumeSpecName "kube-api-access-gq7zg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.272427 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d09491b5-bdb3-459c-a33b-0eae7f2b6f9c-kube-api-access-hc485" (OuterVolumeSpecName: "kube-api-access-hc485") pod "d09491b5-bdb3-459c-a33b-0eae7f2b6f9c" (UID: "d09491b5-bdb3-459c-a33b-0eae7f2b6f9c"). InnerVolumeSpecName "kube-api-access-hc485". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.284084 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed0b0d47-c436-47b1-ad07-719de0986cb0-config-data" (OuterVolumeSpecName: "config-data") pod "ed0b0d47-c436-47b1-ad07-719de0986cb0" (UID: "ed0b0d47-c436-47b1-ad07-719de0986cb0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.284929 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d09491b5-bdb3-459c-a33b-0eae7f2b6f9c-config-data" (OuterVolumeSpecName: "config-data") pod "d09491b5-bdb3-459c-a33b-0eae7f2b6f9c" (UID: "d09491b5-bdb3-459c-a33b-0eae7f2b6f9c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.364697 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fc789\" (UniqueName: \"kubernetes.io/projected/d76b20ef-bbfc-4399-86bc-7a69eeafa479-kube-api-access-fc789\") pod \"d76b20ef-bbfc-4399-86bc-7a69eeafa479\" (UID: \"d76b20ef-bbfc-4399-86bc-7a69eeafa479\") " Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.364761 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d76b20ef-bbfc-4399-86bc-7a69eeafa479-config-data\") pod \"d76b20ef-bbfc-4399-86bc-7a69eeafa479\" (UID: \"d76b20ef-bbfc-4399-86bc-7a69eeafa479\") " Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.365148 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gq7zg\" (UniqueName: \"kubernetes.io/projected/ed0b0d47-c436-47b1-ad07-719de0986cb0-kube-api-access-gq7zg\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.365176 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d09491b5-bdb3-459c-a33b-0eae7f2b6f9c-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.365192 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed0b0d47-c436-47b1-ad07-719de0986cb0-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.365205 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hc485\" (UniqueName: \"kubernetes.io/projected/d09491b5-bdb3-459c-a33b-0eae7f2b6f9c-kube-api-access-hc485\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.368631 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d76b20ef-bbfc-4399-86bc-7a69eeafa479-kube-api-access-fc789" (OuterVolumeSpecName: "kube-api-access-fc789") pod "d76b20ef-bbfc-4399-86bc-7a69eeafa479" (UID: "d76b20ef-bbfc-4399-86bc-7a69eeafa479"). InnerVolumeSpecName "kube-api-access-fc789". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.383419 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d76b20ef-bbfc-4399-86bc-7a69eeafa479-config-data" (OuterVolumeSpecName: "config-data") pod "d76b20ef-bbfc-4399-86bc-7a69eeafa479" (UID: "d76b20ef-bbfc-4399-86bc-7a69eeafa479"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.470348 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.470876 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d76b20ef-bbfc-4399-86bc-7a69eeafa479-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.470905 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fc789\" (UniqueName: \"kubernetes.io/projected/d76b20ef-bbfc-4399-86bc-7a69eeafa479-kube-api-access-fc789\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.487683 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d194640a-7e19-4627-8671-ebd9bb1eeaa5" path="/var/lib/kubelet/pods/d194640a-7e19-4627-8671-ebd9bb1eeaa5/volumes" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.489158 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.592965 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-cell1-db-create-bshss"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.605758 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-cell1-db-create-bshss"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.612386 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-cell1-a2a4-account-create-update-sf2bt"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.618934 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/novacell1a2a4-account-delete-4x9qk"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.624568 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/novacell1a2a4-account-delete-4x9qk"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.634424 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-cell1-a2a4-account-create-update-sf2bt"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.678248 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-cell0-db-create-l6c9k"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.696666 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-cell0-db-create-l6c9k"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.703197 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/novacell0c1fc-account-delete-lvmdq"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.703693 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.710199 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-cell0-c1fc-account-create-update-ddw8f"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.716646 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/novacell0c1fc-account-delete-lvmdq"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.724349 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-cell0-c1fc-account-create-update-ddw8f"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.779274 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-api-db-create-xsmh6"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.803341 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-api-db-create-xsmh6"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.812411 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/novaapi1145-account-delete-kh8nx"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.825087 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/novaapi1145-account-delete-kh8nx"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.834634 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-api-1145-account-create-update-pjbbf"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.841337 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-api-1145-account-create-update-pjbbf"] Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.875971 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03a26174-c17e-4e64-a30d-e1ddccee512c-config-data\") pod \"03a26174-c17e-4e64-a30d-e1ddccee512c\" (UID: \"03a26174-c17e-4e64-a30d-e1ddccee512c\") " Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.876050 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rf9hn\" (UniqueName: \"kubernetes.io/projected/03a26174-c17e-4e64-a30d-e1ddccee512c-kube-api-access-rf9hn\") pod \"03a26174-c17e-4e64-a30d-e1ddccee512c\" (UID: \"03a26174-c17e-4e64-a30d-e1ddccee512c\") " Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.886817 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03a26174-c17e-4e64-a30d-e1ddccee512c-kube-api-access-rf9hn" (OuterVolumeSpecName: "kube-api-access-rf9hn") pod "03a26174-c17e-4e64-a30d-e1ddccee512c" (UID: "03a26174-c17e-4e64-a30d-e1ddccee512c"). InnerVolumeSpecName "kube-api-access-rf9hn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.898227 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03a26174-c17e-4e64-a30d-e1ddccee512c-config-data" (OuterVolumeSpecName: "config-data") pod "03a26174-c17e-4e64-a30d-e1ddccee512c" (UID: "03a26174-c17e-4e64-a30d-e1ddccee512c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.977203 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03a26174-c17e-4e64-a30d-e1ddccee512c-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:49 crc kubenswrapper[4711]: I0123 08:52:49.977246 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rf9hn\" (UniqueName: \"kubernetes.io/projected/03a26174-c17e-4e64-a30d-e1ddccee512c-kube-api-access-rf9hn\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.134385 4711 generic.go:334] "Generic (PLEG): container finished" podID="03a26174-c17e-4e64-a30d-e1ddccee512c" containerID="b03772d4f000db00a31fd4cd0d74fd26c7b67ab6ac5eecd32b0b2e528892b58d" exitCode=0 Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.134455 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.134455 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" event={"ID":"03a26174-c17e-4e64-a30d-e1ddccee512c","Type":"ContainerDied","Data":"b03772d4f000db00a31fd4cd0d74fd26c7b67ab6ac5eecd32b0b2e528892b58d"} Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.134598 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" event={"ID":"03a26174-c17e-4e64-a30d-e1ddccee512c","Type":"ContainerDied","Data":"3df6015660c605ea31e1627b9013c21ea0175561f10e31f5d8e498b759317ae7"} Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.134640 4711 scope.go:117] "RemoveContainer" containerID="b03772d4f000db00a31fd4cd0d74fd26c7b67ab6ac5eecd32b0b2e528892b58d" Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.136919 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" event={"ID":"d76b20ef-bbfc-4399-86bc-7a69eeafa479","Type":"ContainerDied","Data":"91183da4b4cf87cb06303d0a2de3022dfe286118a23e160658b3f932d71c39c4"} Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.137038 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0" Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.140823 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"d09491b5-bdb3-459c-a33b-0eae7f2b6f9c","Type":"ContainerDied","Data":"969c0c76ebcaf5e8ffdf2102e4b341fd0b09f9d7d00e239b124aa9a5561562c6"} Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.140918 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.143863 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" event={"ID":"ed0b0d47-c436-47b1-ad07-719de0986cb0","Type":"ContainerDied","Data":"7551d54fb2d595ab2cb06887df78e176ac75c0f247958381e44b75c5f9473915"} Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.143923 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.156356 4711 scope.go:117] "RemoveContainer" containerID="b03772d4f000db00a31fd4cd0d74fd26c7b67ab6ac5eecd32b0b2e528892b58d" Jan 23 08:52:50 crc kubenswrapper[4711]: E0123 08:52:50.166044 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b03772d4f000db00a31fd4cd0d74fd26c7b67ab6ac5eecd32b0b2e528892b58d\": container with ID starting with b03772d4f000db00a31fd4cd0d74fd26c7b67ab6ac5eecd32b0b2e528892b58d not found: ID does not exist" containerID="b03772d4f000db00a31fd4cd0d74fd26c7b67ab6ac5eecd32b0b2e528892b58d" Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.166097 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b03772d4f000db00a31fd4cd0d74fd26c7b67ab6ac5eecd32b0b2e528892b58d"} err="failed to get container status \"b03772d4f000db00a31fd4cd0d74fd26c7b67ab6ac5eecd32b0b2e528892b58d\": rpc error: code = NotFound desc = could not find container \"b03772d4f000db00a31fd4cd0d74fd26c7b67ab6ac5eecd32b0b2e528892b58d\": container with ID starting with b03772d4f000db00a31fd4cd0d74fd26c7b67ab6ac5eecd32b0b2e528892b58d not found: ID does not exist" Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.166128 4711 scope.go:117] "RemoveContainer" containerID="32507dac522cc1cee510b7d3f46eae113739378b0c8ad7d725ea6ff76140718c" Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.190395 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.199341 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.207340 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0"] Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.215732 4711 scope.go:117] "RemoveContainer" containerID="1242a681f3dbcaf5350cde7b252dddd25360bfa31b2cd77fc07a6680a337d459" Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.218454 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-compute-fake1-compute-0"] Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.225011 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.231316 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.242932 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.245582 4711 scope.go:117] "RemoveContainer" containerID="7c16e17c62978f75ae1e7474d4b1139142bff92e2f56ee17cb8e14dd0d169eb1" Jan 23 08:52:50 crc kubenswrapper[4711]: I0123 08:52:50.248574 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.482296 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03a26174-c17e-4e64-a30d-e1ddccee512c" path="/var/lib/kubelet/pods/03a26174-c17e-4e64-a30d-e1ddccee512c/volumes" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 
08:52:51.482826 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0adb282a-52af-4813-970c-207e19ea7350" path="/var/lib/kubelet/pods/0adb282a-52af-4813-970c-207e19ea7350/volumes" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.483314 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0bf3a05e-119e-459b-b2af-0a2dece0ba15" path="/var/lib/kubelet/pods/0bf3a05e-119e-459b-b2af-0a2dece0ba15/volumes" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.483786 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0cab7ffb-b293-4bc0-9334-670b79f37e44" path="/var/lib/kubelet/pods/0cab7ffb-b293-4bc0-9334-670b79f37e44/volumes" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.484662 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0edf7ac9-2f2a-408b-ab29-a7adf16656ba" path="/var/lib/kubelet/pods/0edf7ac9-2f2a-408b-ab29-a7adf16656ba/volumes" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.485101 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10576897-3cea-4c76-b889-ce7edb150d32" path="/var/lib/kubelet/pods/10576897-3cea-4c76-b889-ce7edb150d32/volumes" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.485542 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25fd533a-6c53-4c39-8dff-3d3511f89d5d" path="/var/lib/kubelet/pods/25fd533a-6c53-4c39-8dff-3d3511f89d5d/volumes" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.486631 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35f8100a-5b47-4709-8005-06fd54d0da7a" path="/var/lib/kubelet/pods/35f8100a-5b47-4709-8005-06fd54d0da7a/volumes" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.487244 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65cf67a7-9978-446e-a442-4d5aca0072cb" path="/var/lib/kubelet/pods/65cf67a7-9978-446e-a442-4d5aca0072cb/volumes" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.487754 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7c00bbe-ae45-418c-8c02-d375fc28602f" path="/var/lib/kubelet/pods/c7c00bbe-ae45-418c-8c02-d375fc28602f/volumes" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.488678 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d09491b5-bdb3-459c-a33b-0eae7f2b6f9c" path="/var/lib/kubelet/pods/d09491b5-bdb3-459c-a33b-0eae7f2b6f9c/volumes" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.489133 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d76b20ef-bbfc-4399-86bc-7a69eeafa479" path="/var/lib/kubelet/pods/d76b20ef-bbfc-4399-86bc-7a69eeafa479/volumes" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.489699 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed0b0d47-c436-47b1-ad07-719de0986cb0" path="/var/lib/kubelet/pods/ed0b0d47-c436-47b1-ad07-719de0986cb0/volumes" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.490839 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdf0b61a-9832-45f5-8e29-6d63dd239381" path="/var/lib/kubelet/pods/fdf0b61a-9832-45f5-8e29-6d63dd239381/volumes" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.717363 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-api-db-create-rtqsh"] Jan 23 08:52:51 crc kubenswrapper[4711]: E0123 08:52:51.717680 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35f8100a-5b47-4709-8005-06fd54d0da7a" 
containerName="nova-kuttl-metadata-metadata" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.717696 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="35f8100a-5b47-4709-8005-06fd54d0da7a" containerName="nova-kuttl-metadata-metadata" Jan 23 08:52:51 crc kubenswrapper[4711]: E0123 08:52:51.717709 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d76b20ef-bbfc-4399-86bc-7a69eeafa479" containerName="nova-kuttl-cell1-compute-fake1-compute-compute" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.717716 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d76b20ef-bbfc-4399-86bc-7a69eeafa479" containerName="nova-kuttl-cell1-compute-fake1-compute-compute" Jan 23 08:52:51 crc kubenswrapper[4711]: E0123 08:52:51.717725 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d76b20ef-bbfc-4399-86bc-7a69eeafa479" containerName="nova-kuttl-cell1-compute-fake1-compute-compute" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.717732 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d76b20ef-bbfc-4399-86bc-7a69eeafa479" containerName="nova-kuttl-cell1-compute-fake1-compute-compute" Jan 23 08:52:51 crc kubenswrapper[4711]: E0123 08:52:51.717743 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d194640a-7e19-4627-8671-ebd9bb1eeaa5" containerName="nova-kuttl-api-api" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.717749 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d194640a-7e19-4627-8671-ebd9bb1eeaa5" containerName="nova-kuttl-api-api" Jan 23 08:52:51 crc kubenswrapper[4711]: E0123 08:52:51.717759 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d09491b5-bdb3-459c-a33b-0eae7f2b6f9c" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.717765 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d09491b5-bdb3-459c-a33b-0eae7f2b6f9c" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:52:51 crc kubenswrapper[4711]: E0123 08:52:51.717774 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10576897-3cea-4c76-b889-ce7edb150d32" containerName="mariadb-account-delete" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.717780 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="10576897-3cea-4c76-b889-ce7edb150d32" containerName="mariadb-account-delete" Jan 23 08:52:51 crc kubenswrapper[4711]: E0123 08:52:51.717790 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35f8100a-5b47-4709-8005-06fd54d0da7a" containerName="nova-kuttl-metadata-log" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.717795 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="35f8100a-5b47-4709-8005-06fd54d0da7a" containerName="nova-kuttl-metadata-log" Jan 23 08:52:51 crc kubenswrapper[4711]: E0123 08:52:51.717806 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed0b0d47-c436-47b1-ad07-719de0986cb0" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.717811 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed0b0d47-c436-47b1-ad07-719de0986cb0" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:52:51 crc kubenswrapper[4711]: E0123 08:52:51.717820 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25fd533a-6c53-4c39-8dff-3d3511f89d5d" containerName="mariadb-account-delete" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.717826 4711 
state_mem.go:107] "Deleted CPUSet assignment" podUID="25fd533a-6c53-4c39-8dff-3d3511f89d5d" containerName="mariadb-account-delete" Jan 23 08:52:51 crc kubenswrapper[4711]: E0123 08:52:51.717837 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bf3a05e-119e-459b-b2af-0a2dece0ba15" containerName="mariadb-account-delete" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.717844 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bf3a05e-119e-459b-b2af-0a2dece0ba15" containerName="mariadb-account-delete" Jan 23 08:52:51 crc kubenswrapper[4711]: E0123 08:52:51.717853 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ecb36fd-83f9-430e-b61f-1a27d907d613" containerName="nova-kuttl-cell1-novncproxy-novncproxy" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.717859 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ecb36fd-83f9-430e-b61f-1a27d907d613" containerName="nova-kuttl-cell1-novncproxy-novncproxy" Jan 23 08:52:51 crc kubenswrapper[4711]: E0123 08:52:51.717869 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d194640a-7e19-4627-8671-ebd9bb1eeaa5" containerName="nova-kuttl-api-log" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.717875 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d194640a-7e19-4627-8671-ebd9bb1eeaa5" containerName="nova-kuttl-api-log" Jan 23 08:52:51 crc kubenswrapper[4711]: E0123 08:52:51.717881 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03a26174-c17e-4e64-a30d-e1ddccee512c" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.717887 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="03a26174-c17e-4e64-a30d-e1ddccee512c" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.718018 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d76b20ef-bbfc-4399-86bc-7a69eeafa479" containerName="nova-kuttl-cell1-compute-fake1-compute-compute" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.718027 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d194640a-7e19-4627-8671-ebd9bb1eeaa5" containerName="nova-kuttl-api-log" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.718039 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="35f8100a-5b47-4709-8005-06fd54d0da7a" containerName="nova-kuttl-metadata-log" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.718047 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bf3a05e-119e-459b-b2af-0a2dece0ba15" containerName="mariadb-account-delete" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.718056 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d76b20ef-bbfc-4399-86bc-7a69eeafa479" containerName="nova-kuttl-cell1-compute-fake1-compute-compute" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.718065 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="03a26174-c17e-4e64-a30d-e1ddccee512c" containerName="nova-kuttl-cell0-conductor-conductor" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.718070 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="25fd533a-6c53-4c39-8dff-3d3511f89d5d" containerName="mariadb-account-delete" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.718078 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ecb36fd-83f9-430e-b61f-1a27d907d613" 
containerName="nova-kuttl-cell1-novncproxy-novncproxy" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.718089 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="10576897-3cea-4c76-b889-ce7edb150d32" containerName="mariadb-account-delete" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.718097 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed0b0d47-c436-47b1-ad07-719de0986cb0" containerName="nova-kuttl-cell1-conductor-conductor" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.718106 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d194640a-7e19-4627-8671-ebd9bb1eeaa5" containerName="nova-kuttl-api-api" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.718118 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="35f8100a-5b47-4709-8005-06fd54d0da7a" containerName="nova-kuttl-metadata-metadata" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.718126 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d09491b5-bdb3-459c-a33b-0eae7f2b6f9c" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.718658 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-api-db-create-rtqsh" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.726388 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-api-db-create-rtqsh"] Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.806432 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vh5lc\" (UniqueName: \"kubernetes.io/projected/b032059c-7845-4107-b676-1e1d66d18d16-kube-api-access-vh5lc\") pod \"nova-api-db-create-rtqsh\" (UID: \"b032059c-7845-4107-b676-1e1d66d18d16\") " pod="nova-kuttl-default/nova-api-db-create-rtqsh" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.806588 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b032059c-7845-4107-b676-1e1d66d18d16-operator-scripts\") pod \"nova-api-db-create-rtqsh\" (UID: \"b032059c-7845-4107-b676-1e1d66d18d16\") " pod="nova-kuttl-default/nova-api-db-create-rtqsh" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.822257 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-cell0-db-create-lh44q"] Jan 23 08:52:51 crc kubenswrapper[4711]: E0123 08:52:51.822713 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d76b20ef-bbfc-4399-86bc-7a69eeafa479" containerName="nova-kuttl-cell1-compute-fake1-compute-compute" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.822737 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d76b20ef-bbfc-4399-86bc-7a69eeafa479" containerName="nova-kuttl-cell1-compute-fake1-compute-compute" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.822908 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d76b20ef-bbfc-4399-86bc-7a69eeafa479" containerName="nova-kuttl-cell1-compute-fake1-compute-compute" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.823582 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-cell0-db-create-lh44q" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.837811 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell0-db-create-lh44q"] Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.908215 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b032059c-7845-4107-b676-1e1d66d18d16-operator-scripts\") pod \"nova-api-db-create-rtqsh\" (UID: \"b032059c-7845-4107-b676-1e1d66d18d16\") " pod="nova-kuttl-default/nova-api-db-create-rtqsh" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.908286 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gk8f4\" (UniqueName: \"kubernetes.io/projected/8aaefd64-d6df-4ecb-bdb2-ed135a281f26-kube-api-access-gk8f4\") pod \"nova-cell0-db-create-lh44q\" (UID: \"8aaefd64-d6df-4ecb-bdb2-ed135a281f26\") " pod="nova-kuttl-default/nova-cell0-db-create-lh44q" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.908331 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vh5lc\" (UniqueName: \"kubernetes.io/projected/b032059c-7845-4107-b676-1e1d66d18d16-kube-api-access-vh5lc\") pod \"nova-api-db-create-rtqsh\" (UID: \"b032059c-7845-4107-b676-1e1d66d18d16\") " pod="nova-kuttl-default/nova-api-db-create-rtqsh" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.908353 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8aaefd64-d6df-4ecb-bdb2-ed135a281f26-operator-scripts\") pod \"nova-cell0-db-create-lh44q\" (UID: \"8aaefd64-d6df-4ecb-bdb2-ed135a281f26\") " pod="nova-kuttl-default/nova-cell0-db-create-lh44q" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.911334 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b032059c-7845-4107-b676-1e1d66d18d16-operator-scripts\") pod \"nova-api-db-create-rtqsh\" (UID: \"b032059c-7845-4107-b676-1e1d66d18d16\") " pod="nova-kuttl-default/nova-api-db-create-rtqsh" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.932431 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vh5lc\" (UniqueName: \"kubernetes.io/projected/b032059c-7845-4107-b676-1e1d66d18d16-kube-api-access-vh5lc\") pod \"nova-api-db-create-rtqsh\" (UID: \"b032059c-7845-4107-b676-1e1d66d18d16\") " pod="nova-kuttl-default/nova-api-db-create-rtqsh" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.938653 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-cell1-db-create-4q85d"] Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.939739 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-db-create-4q85d" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.944828 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-api-30c6-account-create-update-jsrs5"] Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.945912 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-api-30c6-account-create-update-jsrs5" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.947645 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-api-db-secret" Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.951560 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-api-30c6-account-create-update-jsrs5"] Jan 23 08:52:51 crc kubenswrapper[4711]: I0123 08:52:51.964920 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell1-db-create-4q85d"] Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.009476 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8aaefd64-d6df-4ecb-bdb2-ed135a281f26-operator-scripts\") pod \"nova-cell0-db-create-lh44q\" (UID: \"8aaefd64-d6df-4ecb-bdb2-ed135a281f26\") " pod="nova-kuttl-default/nova-cell0-db-create-lh44q" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.009639 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gk8f4\" (UniqueName: \"kubernetes.io/projected/8aaefd64-d6df-4ecb-bdb2-ed135a281f26-kube-api-access-gk8f4\") pod \"nova-cell0-db-create-lh44q\" (UID: \"8aaefd64-d6df-4ecb-bdb2-ed135a281f26\") " pod="nova-kuttl-default/nova-cell0-db-create-lh44q" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.010372 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8aaefd64-d6df-4ecb-bdb2-ed135a281f26-operator-scripts\") pod \"nova-cell0-db-create-lh44q\" (UID: \"8aaefd64-d6df-4ecb-bdb2-ed135a281f26\") " pod="nova-kuttl-default/nova-cell0-db-create-lh44q" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.034144 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-api-db-create-rtqsh" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.035117 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gk8f4\" (UniqueName: \"kubernetes.io/projected/8aaefd64-d6df-4ecb-bdb2-ed135a281f26-kube-api-access-gk8f4\") pod \"nova-cell0-db-create-lh44q\" (UID: \"8aaefd64-d6df-4ecb-bdb2-ed135a281f26\") " pod="nova-kuttl-default/nova-cell0-db-create-lh44q" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.111730 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvptl\" (UniqueName: \"kubernetes.io/projected/efaa3835-9199-4083-b21a-fe0513b1f665-kube-api-access-vvptl\") pod \"nova-api-30c6-account-create-update-jsrs5\" (UID: \"efaa3835-9199-4083-b21a-fe0513b1f665\") " pod="nova-kuttl-default/nova-api-30c6-account-create-update-jsrs5" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.111882 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9mr6\" (UniqueName: \"kubernetes.io/projected/979e054b-e3c8-42e2-926f-49d0decb456c-kube-api-access-l9mr6\") pod \"nova-cell1-db-create-4q85d\" (UID: \"979e054b-e3c8-42e2-926f-49d0decb456c\") " pod="nova-kuttl-default/nova-cell1-db-create-4q85d" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.111916 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/979e054b-e3c8-42e2-926f-49d0decb456c-operator-scripts\") pod \"nova-cell1-db-create-4q85d\" (UID: \"979e054b-e3c8-42e2-926f-49d0decb456c\") " pod="nova-kuttl-default/nova-cell1-db-create-4q85d" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.111966 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/efaa3835-9199-4083-b21a-fe0513b1f665-operator-scripts\") pod \"nova-api-30c6-account-create-update-jsrs5\" (UID: \"efaa3835-9199-4083-b21a-fe0513b1f665\") " pod="nova-kuttl-default/nova-api-30c6-account-create-update-jsrs5" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.143122 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-db-create-lh44q" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.149640 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-cell0-6077-account-create-update-dljdv"] Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.150899 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-cell0-6077-account-create-update-dljdv" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.156988 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-cell0-db-secret" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.158138 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell0-6077-account-create-update-dljdv"] Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.236752 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvptl\" (UniqueName: \"kubernetes.io/projected/efaa3835-9199-4083-b21a-fe0513b1f665-kube-api-access-vvptl\") pod \"nova-api-30c6-account-create-update-jsrs5\" (UID: \"efaa3835-9199-4083-b21a-fe0513b1f665\") " pod="nova-kuttl-default/nova-api-30c6-account-create-update-jsrs5" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.236846 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9mr6\" (UniqueName: \"kubernetes.io/projected/979e054b-e3c8-42e2-926f-49d0decb456c-kube-api-access-l9mr6\") pod \"nova-cell1-db-create-4q85d\" (UID: \"979e054b-e3c8-42e2-926f-49d0decb456c\") " pod="nova-kuttl-default/nova-cell1-db-create-4q85d" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.236882 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/979e054b-e3c8-42e2-926f-49d0decb456c-operator-scripts\") pod \"nova-cell1-db-create-4q85d\" (UID: \"979e054b-e3c8-42e2-926f-49d0decb456c\") " pod="nova-kuttl-default/nova-cell1-db-create-4q85d" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.236910 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/efaa3835-9199-4083-b21a-fe0513b1f665-operator-scripts\") pod \"nova-api-30c6-account-create-update-jsrs5\" (UID: \"efaa3835-9199-4083-b21a-fe0513b1f665\") " pod="nova-kuttl-default/nova-api-30c6-account-create-update-jsrs5" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.237767 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/efaa3835-9199-4083-b21a-fe0513b1f665-operator-scripts\") pod \"nova-api-30c6-account-create-update-jsrs5\" (UID: \"efaa3835-9199-4083-b21a-fe0513b1f665\") " pod="nova-kuttl-default/nova-api-30c6-account-create-update-jsrs5" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.238125 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/979e054b-e3c8-42e2-926f-49d0decb456c-operator-scripts\") pod \"nova-cell1-db-create-4q85d\" (UID: \"979e054b-e3c8-42e2-926f-49d0decb456c\") " pod="nova-kuttl-default/nova-cell1-db-create-4q85d" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.255097 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9mr6\" (UniqueName: \"kubernetes.io/projected/979e054b-e3c8-42e2-926f-49d0decb456c-kube-api-access-l9mr6\") pod \"nova-cell1-db-create-4q85d\" (UID: \"979e054b-e3c8-42e2-926f-49d0decb456c\") " pod="nova-kuttl-default/nova-cell1-db-create-4q85d" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.255618 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvptl\" (UniqueName: 
\"kubernetes.io/projected/efaa3835-9199-4083-b21a-fe0513b1f665-kube-api-access-vvptl\") pod \"nova-api-30c6-account-create-update-jsrs5\" (UID: \"efaa3835-9199-4083-b21a-fe0513b1f665\") " pod="nova-kuttl-default/nova-api-30c6-account-create-update-jsrs5" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.286917 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-db-create-4q85d" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.298306 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-api-30c6-account-create-update-jsrs5" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.331335 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-cell1-044b-account-create-update-dvckg"] Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.332322 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-044b-account-create-update-dvckg" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.337117 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-cell1-db-secret" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.343865 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell1-044b-account-create-update-dvckg"] Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.346405 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5263b1b-5e34-49db-a73a-4e179736aae9-operator-scripts\") pod \"nova-cell0-6077-account-create-update-dljdv\" (UID: \"e5263b1b-5e34-49db-a73a-4e179736aae9\") " pod="nova-kuttl-default/nova-cell0-6077-account-create-update-dljdv" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.346466 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7p424\" (UniqueName: \"kubernetes.io/projected/e5263b1b-5e34-49db-a73a-4e179736aae9-kube-api-access-7p424\") pod \"nova-cell0-6077-account-create-update-dljdv\" (UID: \"e5263b1b-5e34-49db-a73a-4e179736aae9\") " pod="nova-kuttl-default/nova-cell0-6077-account-create-update-dljdv" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.447729 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7p424\" (UniqueName: \"kubernetes.io/projected/e5263b1b-5e34-49db-a73a-4e179736aae9-kube-api-access-7p424\") pod \"nova-cell0-6077-account-create-update-dljdv\" (UID: \"e5263b1b-5e34-49db-a73a-4e179736aae9\") " pod="nova-kuttl-default/nova-cell0-6077-account-create-update-dljdv" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.448142 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8s9t\" (UniqueName: \"kubernetes.io/projected/af839ca6-1c3d-41e0-807c-5154f96201c0-kube-api-access-r8s9t\") pod \"nova-cell1-044b-account-create-update-dvckg\" (UID: \"af839ca6-1c3d-41e0-807c-5154f96201c0\") " pod="nova-kuttl-default/nova-cell1-044b-account-create-update-dvckg" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.448284 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af839ca6-1c3d-41e0-807c-5154f96201c0-operator-scripts\") pod \"nova-cell1-044b-account-create-update-dvckg\" 
(UID: \"af839ca6-1c3d-41e0-807c-5154f96201c0\") " pod="nova-kuttl-default/nova-cell1-044b-account-create-update-dvckg" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.448333 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5263b1b-5e34-49db-a73a-4e179736aae9-operator-scripts\") pod \"nova-cell0-6077-account-create-update-dljdv\" (UID: \"e5263b1b-5e34-49db-a73a-4e179736aae9\") " pod="nova-kuttl-default/nova-cell0-6077-account-create-update-dljdv" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.449482 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5263b1b-5e34-49db-a73a-4e179736aae9-operator-scripts\") pod \"nova-cell0-6077-account-create-update-dljdv\" (UID: \"e5263b1b-5e34-49db-a73a-4e179736aae9\") " pod="nova-kuttl-default/nova-cell0-6077-account-create-update-dljdv" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.465773 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7p424\" (UniqueName: \"kubernetes.io/projected/e5263b1b-5e34-49db-a73a-4e179736aae9-kube-api-access-7p424\") pod \"nova-cell0-6077-account-create-update-dljdv\" (UID: \"e5263b1b-5e34-49db-a73a-4e179736aae9\") " pod="nova-kuttl-default/nova-cell0-6077-account-create-update-dljdv" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.474124 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:52:52 crc kubenswrapper[4711]: E0123 08:52:52.474381 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.518067 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-api-db-create-rtqsh"] Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.550253 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af839ca6-1c3d-41e0-807c-5154f96201c0-operator-scripts\") pod \"nova-cell1-044b-account-create-update-dvckg\" (UID: \"af839ca6-1c3d-41e0-807c-5154f96201c0\") " pod="nova-kuttl-default/nova-cell1-044b-account-create-update-dvckg" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.550484 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8s9t\" (UniqueName: \"kubernetes.io/projected/af839ca6-1c3d-41e0-807c-5154f96201c0-kube-api-access-r8s9t\") pod \"nova-cell1-044b-account-create-update-dvckg\" (UID: \"af839ca6-1c3d-41e0-807c-5154f96201c0\") " pod="nova-kuttl-default/nova-cell1-044b-account-create-update-dvckg" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.551120 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af839ca6-1c3d-41e0-807c-5154f96201c0-operator-scripts\") pod \"nova-cell1-044b-account-create-update-dvckg\" (UID: \"af839ca6-1c3d-41e0-807c-5154f96201c0\") " pod="nova-kuttl-default/nova-cell1-044b-account-create-update-dvckg" Jan 23 
08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.556265 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-6077-account-create-update-dljdv" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.569581 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8s9t\" (UniqueName: \"kubernetes.io/projected/af839ca6-1c3d-41e0-807c-5154f96201c0-kube-api-access-r8s9t\") pod \"nova-cell1-044b-account-create-update-dvckg\" (UID: \"af839ca6-1c3d-41e0-807c-5154f96201c0\") " pod="nova-kuttl-default/nova-cell1-044b-account-create-update-dvckg" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.622705 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell0-db-create-lh44q"] Jan 23 08:52:52 crc kubenswrapper[4711]: W0123 08:52:52.635049 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8aaefd64_d6df_4ecb_bdb2_ed135a281f26.slice/crio-a5b7cc4513344a2903c759c0b07d2db66fb03b8307d97ba25290171642217532 WatchSource:0}: Error finding container a5b7cc4513344a2903c759c0b07d2db66fb03b8307d97ba25290171642217532: Status 404 returned error can't find the container with id a5b7cc4513344a2903c759c0b07d2db66fb03b8307d97ba25290171642217532 Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.655284 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-044b-account-create-update-dvckg" Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.758718 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell1-db-create-4q85d"] Jan 23 08:52:52 crc kubenswrapper[4711]: W0123 08:52:52.771127 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod979e054b_e3c8_42e2_926f_49d0decb456c.slice/crio-a708db90adf5fcdbe2230af015e272ce6c22231ca69107a6e03398fbbd107b88 WatchSource:0}: Error finding container a708db90adf5fcdbe2230af015e272ce6c22231ca69107a6e03398fbbd107b88: Status 404 returned error can't find the container with id a708db90adf5fcdbe2230af015e272ce6c22231ca69107a6e03398fbbd107b88 Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.820446 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-api-30c6-account-create-update-jsrs5"] Jan 23 08:52:52 crc kubenswrapper[4711]: I0123 08:52:52.983130 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell0-6077-account-create-update-dljdv"] Jan 23 08:52:52 crc kubenswrapper[4711]: W0123 08:52:52.985987 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5263b1b_5e34_49db_a73a_4e179736aae9.slice/crio-9183f9f4707ec1bbb9afdef4cb20c1dbf8c4710f66af6f5c33655acd77598887 WatchSource:0}: Error finding container 9183f9f4707ec1bbb9afdef4cb20c1dbf8c4710f66af6f5c33655acd77598887: Status 404 returned error can't find the container with id 9183f9f4707ec1bbb9afdef4cb20c1dbf8c4710f66af6f5c33655acd77598887 Jan 23 08:52:53 crc kubenswrapper[4711]: I0123 08:52:53.145502 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-cell1-044b-account-create-update-dvckg"] Jan 23 08:52:53 crc kubenswrapper[4711]: W0123 08:52:53.148026 4711 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaf839ca6_1c3d_41e0_807c_5154f96201c0.slice/crio-c49f8c7f10f02ba136a41209c5452a467963ada8205b14ce6a31ca4061856277 WatchSource:0}: Error finding container c49f8c7f10f02ba136a41209c5452a467963ada8205b14ce6a31ca4061856277: Status 404 returned error can't find the container with id c49f8c7f10f02ba136a41209c5452a467963ada8205b14ce6a31ca4061856277 Jan 23 08:52:53 crc kubenswrapper[4711]: I0123 08:52:53.181782 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-30c6-account-create-update-jsrs5" event={"ID":"efaa3835-9199-4083-b21a-fe0513b1f665","Type":"ContainerStarted","Data":"c475463ca964105f37ce5ed0b406f23fe472881d88fdbf6f8e2f455cc994e97f"} Jan 23 08:52:53 crc kubenswrapper[4711]: I0123 08:52:53.183075 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-db-create-lh44q" event={"ID":"8aaefd64-d6df-4ecb-bdb2-ed135a281f26","Type":"ContainerStarted","Data":"a5b7cc4513344a2903c759c0b07d2db66fb03b8307d97ba25290171642217532"} Jan 23 08:52:53 crc kubenswrapper[4711]: I0123 08:52:53.184172 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-db-create-4q85d" event={"ID":"979e054b-e3c8-42e2-926f-49d0decb456c","Type":"ContainerStarted","Data":"a708db90adf5fcdbe2230af015e272ce6c22231ca69107a6e03398fbbd107b88"} Jan 23 08:52:53 crc kubenswrapper[4711]: I0123 08:52:53.185146 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-db-create-rtqsh" event={"ID":"b032059c-7845-4107-b676-1e1d66d18d16","Type":"ContainerStarted","Data":"87636ce413763091cdee0536c7d9eb9099b86db3b46b94e588be0d3152a19a67"} Jan 23 08:52:53 crc kubenswrapper[4711]: I0123 08:52:53.186113 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-044b-account-create-update-dvckg" event={"ID":"af839ca6-1c3d-41e0-807c-5154f96201c0","Type":"ContainerStarted","Data":"c49f8c7f10f02ba136a41209c5452a467963ada8205b14ce6a31ca4061856277"} Jan 23 08:52:53 crc kubenswrapper[4711]: I0123 08:52:53.187076 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-6077-account-create-update-dljdv" event={"ID":"e5263b1b-5e34-49db-a73a-4e179736aae9","Type":"ContainerStarted","Data":"9183f9f4707ec1bbb9afdef4cb20c1dbf8c4710f66af6f5c33655acd77598887"} Jan 23 08:52:54 crc kubenswrapper[4711]: I0123 08:52:54.199968 4711 generic.go:334] "Generic (PLEG): container finished" podID="e5263b1b-5e34-49db-a73a-4e179736aae9" containerID="830f6de51408cb657d26379fb5bed5e9675ccd99efcc4151de795194d1d88c8e" exitCode=0 Jan 23 08:52:54 crc kubenswrapper[4711]: I0123 08:52:54.200040 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-6077-account-create-update-dljdv" event={"ID":"e5263b1b-5e34-49db-a73a-4e179736aae9","Type":"ContainerDied","Data":"830f6de51408cb657d26379fb5bed5e9675ccd99efcc4151de795194d1d88c8e"} Jan 23 08:52:54 crc kubenswrapper[4711]: I0123 08:52:54.205039 4711 generic.go:334] "Generic (PLEG): container finished" podID="efaa3835-9199-4083-b21a-fe0513b1f665" containerID="f25c7e1f0ac7bcd46d1718f8839fd86e10c37c765e437ee25c5d4f5ab9988c27" exitCode=0 Jan 23 08:52:54 crc kubenswrapper[4711]: I0123 08:52:54.205132 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-30c6-account-create-update-jsrs5" 
event={"ID":"efaa3835-9199-4083-b21a-fe0513b1f665","Type":"ContainerDied","Data":"f25c7e1f0ac7bcd46d1718f8839fd86e10c37c765e437ee25c5d4f5ab9988c27"} Jan 23 08:52:54 crc kubenswrapper[4711]: I0123 08:52:54.208968 4711 generic.go:334] "Generic (PLEG): container finished" podID="8aaefd64-d6df-4ecb-bdb2-ed135a281f26" containerID="ae111dc8f9f752e37b2bd0eaefd038cd79f089329a361c8c2bef32721e850145" exitCode=0 Jan 23 08:52:54 crc kubenswrapper[4711]: I0123 08:52:54.209063 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-db-create-lh44q" event={"ID":"8aaefd64-d6df-4ecb-bdb2-ed135a281f26","Type":"ContainerDied","Data":"ae111dc8f9f752e37b2bd0eaefd038cd79f089329a361c8c2bef32721e850145"} Jan 23 08:52:54 crc kubenswrapper[4711]: I0123 08:52:54.211404 4711 generic.go:334] "Generic (PLEG): container finished" podID="979e054b-e3c8-42e2-926f-49d0decb456c" containerID="33dc4c76a175b75cec7c2a453f73a1fe297bf9e6ce5ea0498a73028452d12079" exitCode=0 Jan 23 08:52:54 crc kubenswrapper[4711]: I0123 08:52:54.211487 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-db-create-4q85d" event={"ID":"979e054b-e3c8-42e2-926f-49d0decb456c","Type":"ContainerDied","Data":"33dc4c76a175b75cec7c2a453f73a1fe297bf9e6ce5ea0498a73028452d12079"} Jan 23 08:52:54 crc kubenswrapper[4711]: I0123 08:52:54.213997 4711 generic.go:334] "Generic (PLEG): container finished" podID="b032059c-7845-4107-b676-1e1d66d18d16" containerID="342a94288bda567d38e56278eae18980f33e5376cfd98dc1ff7ba4bbbe7999f7" exitCode=0 Jan 23 08:52:54 crc kubenswrapper[4711]: I0123 08:52:54.214285 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-db-create-rtqsh" event={"ID":"b032059c-7845-4107-b676-1e1d66d18d16","Type":"ContainerDied","Data":"342a94288bda567d38e56278eae18980f33e5376cfd98dc1ff7ba4bbbe7999f7"} Jan 23 08:52:54 crc kubenswrapper[4711]: I0123 08:52:54.222534 4711 generic.go:334] "Generic (PLEG): container finished" podID="af839ca6-1c3d-41e0-807c-5154f96201c0" containerID="d6d6e6835d273b23e13d45475d3dd64ccbabb1cb78c758ca7934bc9df32bb17e" exitCode=0 Jan 23 08:52:54 crc kubenswrapper[4711]: I0123 08:52:54.222578 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-044b-account-create-update-dvckg" event={"ID":"af839ca6-1c3d-41e0-807c-5154f96201c0","Type":"ContainerDied","Data":"d6d6e6835d273b23e13d45475d3dd64ccbabb1cb78c758ca7934bc9df32bb17e"} Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.586660 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-api-30c6-account-create-update-jsrs5" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.609587 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvptl\" (UniqueName: \"kubernetes.io/projected/efaa3835-9199-4083-b21a-fe0513b1f665-kube-api-access-vvptl\") pod \"efaa3835-9199-4083-b21a-fe0513b1f665\" (UID: \"efaa3835-9199-4083-b21a-fe0513b1f665\") " Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.609822 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/efaa3835-9199-4083-b21a-fe0513b1f665-operator-scripts\") pod \"efaa3835-9199-4083-b21a-fe0513b1f665\" (UID: \"efaa3835-9199-4083-b21a-fe0513b1f665\") " Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.610892 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/efaa3835-9199-4083-b21a-fe0513b1f665-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "efaa3835-9199-4083-b21a-fe0513b1f665" (UID: "efaa3835-9199-4083-b21a-fe0513b1f665"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.616861 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efaa3835-9199-4083-b21a-fe0513b1f665-kube-api-access-vvptl" (OuterVolumeSpecName: "kube-api-access-vvptl") pod "efaa3835-9199-4083-b21a-fe0513b1f665" (UID: "efaa3835-9199-4083-b21a-fe0513b1f665"). InnerVolumeSpecName "kube-api-access-vvptl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.712691 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvptl\" (UniqueName: \"kubernetes.io/projected/efaa3835-9199-4083-b21a-fe0513b1f665-kube-api-access-vvptl\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.715813 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/efaa3835-9199-4083-b21a-fe0513b1f665-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.765350 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-db-create-lh44q" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.785414 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-6077-account-create-update-dljdv" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.796486 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-044b-account-create-update-dvckg" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.808497 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-api-db-create-rtqsh" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.815187 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-cell1-db-create-4q85d" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.817961 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7p424\" (UniqueName: \"kubernetes.io/projected/e5263b1b-5e34-49db-a73a-4e179736aae9-kube-api-access-7p424\") pod \"e5263b1b-5e34-49db-a73a-4e179736aae9\" (UID: \"e5263b1b-5e34-49db-a73a-4e179736aae9\") " Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.818079 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8aaefd64-d6df-4ecb-bdb2-ed135a281f26-operator-scripts\") pod \"8aaefd64-d6df-4ecb-bdb2-ed135a281f26\" (UID: \"8aaefd64-d6df-4ecb-bdb2-ed135a281f26\") " Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.818244 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af839ca6-1c3d-41e0-807c-5154f96201c0-operator-scripts\") pod \"af839ca6-1c3d-41e0-807c-5154f96201c0\" (UID: \"af839ca6-1c3d-41e0-807c-5154f96201c0\") " Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.818319 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8s9t\" (UniqueName: \"kubernetes.io/projected/af839ca6-1c3d-41e0-807c-5154f96201c0-kube-api-access-r8s9t\") pod \"af839ca6-1c3d-41e0-807c-5154f96201c0\" (UID: \"af839ca6-1c3d-41e0-807c-5154f96201c0\") " Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.818401 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gk8f4\" (UniqueName: \"kubernetes.io/projected/8aaefd64-d6df-4ecb-bdb2-ed135a281f26-kube-api-access-gk8f4\") pod \"8aaefd64-d6df-4ecb-bdb2-ed135a281f26\" (UID: \"8aaefd64-d6df-4ecb-bdb2-ed135a281f26\") " Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.818435 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5263b1b-5e34-49db-a73a-4e179736aae9-operator-scripts\") pod \"e5263b1b-5e34-49db-a73a-4e179736aae9\" (UID: \"e5263b1b-5e34-49db-a73a-4e179736aae9\") " Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.822066 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5263b1b-5e34-49db-a73a-4e179736aae9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e5263b1b-5e34-49db-a73a-4e179736aae9" (UID: "e5263b1b-5e34-49db-a73a-4e179736aae9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.822414 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8aaefd64-d6df-4ecb-bdb2-ed135a281f26-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8aaefd64-d6df-4ecb-bdb2-ed135a281f26" (UID: "8aaefd64-d6df-4ecb-bdb2-ed135a281f26"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.823655 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af839ca6-1c3d-41e0-807c-5154f96201c0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "af839ca6-1c3d-41e0-807c-5154f96201c0" (UID: "af839ca6-1c3d-41e0-807c-5154f96201c0"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.824576 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af839ca6-1c3d-41e0-807c-5154f96201c0-kube-api-access-r8s9t" (OuterVolumeSpecName: "kube-api-access-r8s9t") pod "af839ca6-1c3d-41e0-807c-5154f96201c0" (UID: "af839ca6-1c3d-41e0-807c-5154f96201c0"). InnerVolumeSpecName "kube-api-access-r8s9t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.826218 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8aaefd64-d6df-4ecb-bdb2-ed135a281f26-kube-api-access-gk8f4" (OuterVolumeSpecName: "kube-api-access-gk8f4") pod "8aaefd64-d6df-4ecb-bdb2-ed135a281f26" (UID: "8aaefd64-d6df-4ecb-bdb2-ed135a281f26"). InnerVolumeSpecName "kube-api-access-gk8f4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.827547 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5263b1b-5e34-49db-a73a-4e179736aae9-kube-api-access-7p424" (OuterVolumeSpecName: "kube-api-access-7p424") pod "e5263b1b-5e34-49db-a73a-4e179736aae9" (UID: "e5263b1b-5e34-49db-a73a-4e179736aae9"). InnerVolumeSpecName "kube-api-access-7p424". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.921338 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vh5lc\" (UniqueName: \"kubernetes.io/projected/b032059c-7845-4107-b676-1e1d66d18d16-kube-api-access-vh5lc\") pod \"b032059c-7845-4107-b676-1e1d66d18d16\" (UID: \"b032059c-7845-4107-b676-1e1d66d18d16\") " Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.921415 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/979e054b-e3c8-42e2-926f-49d0decb456c-operator-scripts\") pod \"979e054b-e3c8-42e2-926f-49d0decb456c\" (UID: \"979e054b-e3c8-42e2-926f-49d0decb456c\") " Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.921612 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b032059c-7845-4107-b676-1e1d66d18d16-operator-scripts\") pod \"b032059c-7845-4107-b676-1e1d66d18d16\" (UID: \"b032059c-7845-4107-b676-1e1d66d18d16\") " Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.921676 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9mr6\" (UniqueName: \"kubernetes.io/projected/979e054b-e3c8-42e2-926f-49d0decb456c-kube-api-access-l9mr6\") pod \"979e054b-e3c8-42e2-926f-49d0decb456c\" (UID: \"979e054b-e3c8-42e2-926f-49d0decb456c\") " Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.922071 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af839ca6-1c3d-41e0-807c-5154f96201c0-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.922081 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b032059c-7845-4107-b676-1e1d66d18d16-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b032059c-7845-4107-b676-1e1d66d18d16" (UID: "b032059c-7845-4107-b676-1e1d66d18d16"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.922098 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8s9t\" (UniqueName: \"kubernetes.io/projected/af839ca6-1c3d-41e0-807c-5154f96201c0-kube-api-access-r8s9t\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.922171 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gk8f4\" (UniqueName: \"kubernetes.io/projected/8aaefd64-d6df-4ecb-bdb2-ed135a281f26-kube-api-access-gk8f4\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.922187 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5263b1b-5e34-49db-a73a-4e179736aae9-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.922204 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7p424\" (UniqueName: \"kubernetes.io/projected/e5263b1b-5e34-49db-a73a-4e179736aae9-kube-api-access-7p424\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.922217 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8aaefd64-d6df-4ecb-bdb2-ed135a281f26-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.922700 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/979e054b-e3c8-42e2-926f-49d0decb456c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "979e054b-e3c8-42e2-926f-49d0decb456c" (UID: "979e054b-e3c8-42e2-926f-49d0decb456c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.924906 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b032059c-7845-4107-b676-1e1d66d18d16-kube-api-access-vh5lc" (OuterVolumeSpecName: "kube-api-access-vh5lc") pod "b032059c-7845-4107-b676-1e1d66d18d16" (UID: "b032059c-7845-4107-b676-1e1d66d18d16"). InnerVolumeSpecName "kube-api-access-vh5lc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:52:55 crc kubenswrapper[4711]: I0123 08:52:55.925362 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/979e054b-e3c8-42e2-926f-49d0decb456c-kube-api-access-l9mr6" (OuterVolumeSpecName: "kube-api-access-l9mr6") pod "979e054b-e3c8-42e2-926f-49d0decb456c" (UID: "979e054b-e3c8-42e2-926f-49d0decb456c"). InnerVolumeSpecName "kube-api-access-l9mr6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.023567 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b032059c-7845-4107-b676-1e1d66d18d16-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.023630 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9mr6\" (UniqueName: \"kubernetes.io/projected/979e054b-e3c8-42e2-926f-49d0decb456c-kube-api-access-l9mr6\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.023642 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vh5lc\" (UniqueName: \"kubernetes.io/projected/b032059c-7845-4107-b676-1e1d66d18d16-kube-api-access-vh5lc\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.023652 4711 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/979e054b-e3c8-42e2-926f-49d0decb456c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.238585 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-044b-account-create-update-dvckg" Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.238582 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-044b-account-create-update-dvckg" event={"ID":"af839ca6-1c3d-41e0-807c-5154f96201c0","Type":"ContainerDied","Data":"c49f8c7f10f02ba136a41209c5452a467963ada8205b14ce6a31ca4061856277"} Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.238718 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c49f8c7f10f02ba136a41209c5452a467963ada8205b14ce6a31ca4061856277" Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.240432 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-6077-account-create-update-dljdv" Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.240435 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-6077-account-create-update-dljdv" event={"ID":"e5263b1b-5e34-49db-a73a-4e179736aae9","Type":"ContainerDied","Data":"9183f9f4707ec1bbb9afdef4cb20c1dbf8c4710f66af6f5c33655acd77598887"} Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.240575 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9183f9f4707ec1bbb9afdef4cb20c1dbf8c4710f66af6f5c33655acd77598887" Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.242295 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-30c6-account-create-update-jsrs5" event={"ID":"efaa3835-9199-4083-b21a-fe0513b1f665","Type":"ContainerDied","Data":"c475463ca964105f37ce5ed0b406f23fe472881d88fdbf6f8e2f455cc994e97f"} Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.242321 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c475463ca964105f37ce5ed0b406f23fe472881d88fdbf6f8e2f455cc994e97f" Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.242375 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-api-30c6-account-create-update-jsrs5" Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.251581 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell0-db-create-lh44q" event={"ID":"8aaefd64-d6df-4ecb-bdb2-ed135a281f26","Type":"ContainerDied","Data":"a5b7cc4513344a2903c759c0b07d2db66fb03b8307d97ba25290171642217532"} Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.251751 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a5b7cc4513344a2903c759c0b07d2db66fb03b8307d97ba25290171642217532" Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.251629 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell0-db-create-lh44q" Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.254788 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-cell1-db-create-4q85d" Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.254775 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-cell1-db-create-4q85d" event={"ID":"979e054b-e3c8-42e2-926f-49d0decb456c","Type":"ContainerDied","Data":"a708db90adf5fcdbe2230af015e272ce6c22231ca69107a6e03398fbbd107b88"} Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.254890 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a708db90adf5fcdbe2230af015e272ce6c22231ca69107a6e03398fbbd107b88" Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.257629 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-api-db-create-rtqsh" event={"ID":"b032059c-7845-4107-b676-1e1d66d18d16","Type":"ContainerDied","Data":"87636ce413763091cdee0536c7d9eb9099b86db3b46b94e588be0d3152a19a67"} Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.257661 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="87636ce413763091cdee0536c7d9eb9099b86db3b46b94e588be0d3152a19a67" Jan 23 08:52:56 crc kubenswrapper[4711]: I0123 08:52:56.257678 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-api-db-create-rtqsh" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.347237 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv"] Jan 23 08:52:57 crc kubenswrapper[4711]: E0123 08:52:57.348226 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8aaefd64-d6df-4ecb-bdb2-ed135a281f26" containerName="mariadb-database-create" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.348241 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="8aaefd64-d6df-4ecb-bdb2-ed135a281f26" containerName="mariadb-database-create" Jan 23 08:52:57 crc kubenswrapper[4711]: E0123 08:52:57.348268 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efaa3835-9199-4083-b21a-fe0513b1f665" containerName="mariadb-account-create-update" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.348277 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="efaa3835-9199-4083-b21a-fe0513b1f665" containerName="mariadb-account-create-update" Jan 23 08:52:57 crc kubenswrapper[4711]: E0123 08:52:57.348296 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b032059c-7845-4107-b676-1e1d66d18d16" containerName="mariadb-database-create" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.348306 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b032059c-7845-4107-b676-1e1d66d18d16" containerName="mariadb-database-create" Jan 23 08:52:57 crc kubenswrapper[4711]: E0123 08:52:57.348328 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="979e054b-e3c8-42e2-926f-49d0decb456c" containerName="mariadb-database-create" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.348337 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="979e054b-e3c8-42e2-926f-49d0decb456c" containerName="mariadb-database-create" Jan 23 08:52:57 crc kubenswrapper[4711]: E0123 08:52:57.348365 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af839ca6-1c3d-41e0-807c-5154f96201c0" containerName="mariadb-account-create-update" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.348374 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="af839ca6-1c3d-41e0-807c-5154f96201c0" containerName="mariadb-account-create-update" Jan 23 08:52:57 crc kubenswrapper[4711]: E0123 08:52:57.348393 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5263b1b-5e34-49db-a73a-4e179736aae9" containerName="mariadb-account-create-update" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.348400 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5263b1b-5e34-49db-a73a-4e179736aae9" containerName="mariadb-account-create-update" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.348815 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="8aaefd64-d6df-4ecb-bdb2-ed135a281f26" containerName="mariadb-database-create" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.348835 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="af839ca6-1c3d-41e0-807c-5154f96201c0" containerName="mariadb-account-create-update" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.348868 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="b032059c-7845-4107-b676-1e1d66d18d16" containerName="mariadb-database-create" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.348887 4711 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="e5263b1b-5e34-49db-a73a-4e179736aae9" containerName="mariadb-account-create-update" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.348902 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="efaa3835-9199-4083-b21a-fe0513b1f665" containerName="mariadb-account-create-update" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.348933 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="979e054b-e3c8-42e2-926f-49d0decb456c" containerName="mariadb-database-create" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.349799 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.358895 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-conductor-scripts" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.359090 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-conductor-config-data" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.359196 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-nova-kuttl-dockercfg-ssqct" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.371887 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv"] Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.449665 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/847224a6-d90c-4be4-a4ea-c60e7c6d9986-scripts\") pod \"nova-kuttl-cell0-conductor-db-sync-9zdrv\" (UID: \"847224a6-d90c-4be4-a4ea-c60e7c6d9986\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.450361 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/847224a6-d90c-4be4-a4ea-c60e7c6d9986-config-data\") pod \"nova-kuttl-cell0-conductor-db-sync-9zdrv\" (UID: \"847224a6-d90c-4be4-a4ea-c60e7c6d9986\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.450579 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rh7xs\" (UniqueName: \"kubernetes.io/projected/847224a6-d90c-4be4-a4ea-c60e7c6d9986-kube-api-access-rh7xs\") pod \"nova-kuttl-cell0-conductor-db-sync-9zdrv\" (UID: \"847224a6-d90c-4be4-a4ea-c60e7c6d9986\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.552615 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rh7xs\" (UniqueName: \"kubernetes.io/projected/847224a6-d90c-4be4-a4ea-c60e7c6d9986-kube-api-access-rh7xs\") pod \"nova-kuttl-cell0-conductor-db-sync-9zdrv\" (UID: \"847224a6-d90c-4be4-a4ea-c60e7c6d9986\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.553189 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/847224a6-d90c-4be4-a4ea-c60e7c6d9986-scripts\") pod \"nova-kuttl-cell0-conductor-db-sync-9zdrv\" (UID: 
\"847224a6-d90c-4be4-a4ea-c60e7c6d9986\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.553221 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/847224a6-d90c-4be4-a4ea-c60e7c6d9986-config-data\") pod \"nova-kuttl-cell0-conductor-db-sync-9zdrv\" (UID: \"847224a6-d90c-4be4-a4ea-c60e7c6d9986\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.560386 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/847224a6-d90c-4be4-a4ea-c60e7c6d9986-scripts\") pod \"nova-kuttl-cell0-conductor-db-sync-9zdrv\" (UID: \"847224a6-d90c-4be4-a4ea-c60e7c6d9986\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.562599 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/847224a6-d90c-4be4-a4ea-c60e7c6d9986-config-data\") pod \"nova-kuttl-cell0-conductor-db-sync-9zdrv\" (UID: \"847224a6-d90c-4be4-a4ea-c60e7c6d9986\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.567603 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rh7xs\" (UniqueName: \"kubernetes.io/projected/847224a6-d90c-4be4-a4ea-c60e7c6d9986-kube-api-access-rh7xs\") pod \"nova-kuttl-cell0-conductor-db-sync-9zdrv\" (UID: \"847224a6-d90c-4be4-a4ea-c60e7c6d9986\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv" Jan 23 08:52:57 crc kubenswrapper[4711]: I0123 08:52:57.682475 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv" Jan 23 08:52:58 crc kubenswrapper[4711]: I0123 08:52:58.102484 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv"] Jan 23 08:52:58 crc kubenswrapper[4711]: W0123 08:52:58.108737 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod847224a6_d90c_4be4_a4ea_c60e7c6d9986.slice/crio-6e531ca5b768b62beed1cce0b89929717e31c948f07a3f070d1263055902e670 WatchSource:0}: Error finding container 6e531ca5b768b62beed1cce0b89929717e31c948f07a3f070d1263055902e670: Status 404 returned error can't find the container with id 6e531ca5b768b62beed1cce0b89929717e31c948f07a3f070d1263055902e670 Jan 23 08:52:58 crc kubenswrapper[4711]: I0123 08:52:58.272708 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv" event={"ID":"847224a6-d90c-4be4-a4ea-c60e7c6d9986","Type":"ContainerStarted","Data":"6e531ca5b768b62beed1cce0b89929717e31c948f07a3f070d1263055902e670"} Jan 23 08:52:59 crc kubenswrapper[4711]: I0123 08:52:59.283802 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv" event={"ID":"847224a6-d90c-4be4-a4ea-c60e7c6d9986","Type":"ContainerStarted","Data":"6bb3d22d703dd9c6bbba85dda7c89041ba903a2bccfce80525e246e5664ca5d0"} Jan 23 08:52:59 crc kubenswrapper[4711]: I0123 08:52:59.303563 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv" podStartSLOduration=2.303533594 podStartE2EDuration="2.303533594s" podCreationTimestamp="2026-01-23 08:52:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:52:59.302700964 +0000 UTC m=+1964.875657362" watchObservedRunningTime="2026-01-23 08:52:59.303533594 +0000 UTC m=+1964.876489972" Jan 23 08:53:03 crc kubenswrapper[4711]: I0123 08:53:03.326830 4711 generic.go:334] "Generic (PLEG): container finished" podID="847224a6-d90c-4be4-a4ea-c60e7c6d9986" containerID="6bb3d22d703dd9c6bbba85dda7c89041ba903a2bccfce80525e246e5664ca5d0" exitCode=0 Jan 23 08:53:03 crc kubenswrapper[4711]: I0123 08:53:03.326932 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv" event={"ID":"847224a6-d90c-4be4-a4ea-c60e7c6d9986","Type":"ContainerDied","Data":"6bb3d22d703dd9c6bbba85dda7c89041ba903a2bccfce80525e246e5664ca5d0"} Jan 23 08:53:04 crc kubenswrapper[4711]: I0123 08:53:04.633125 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv" Jan 23 08:53:04 crc kubenswrapper[4711]: I0123 08:53:04.682442 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/847224a6-d90c-4be4-a4ea-c60e7c6d9986-scripts\") pod \"847224a6-d90c-4be4-a4ea-c60e7c6d9986\" (UID: \"847224a6-d90c-4be4-a4ea-c60e7c6d9986\") " Jan 23 08:53:04 crc kubenswrapper[4711]: I0123 08:53:04.682697 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rh7xs\" (UniqueName: \"kubernetes.io/projected/847224a6-d90c-4be4-a4ea-c60e7c6d9986-kube-api-access-rh7xs\") pod \"847224a6-d90c-4be4-a4ea-c60e7c6d9986\" (UID: \"847224a6-d90c-4be4-a4ea-c60e7c6d9986\") " Jan 23 08:53:04 crc kubenswrapper[4711]: I0123 08:53:04.682843 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/847224a6-d90c-4be4-a4ea-c60e7c6d9986-config-data\") pod \"847224a6-d90c-4be4-a4ea-c60e7c6d9986\" (UID: \"847224a6-d90c-4be4-a4ea-c60e7c6d9986\") " Jan 23 08:53:04 crc kubenswrapper[4711]: I0123 08:53:04.687978 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/847224a6-d90c-4be4-a4ea-c60e7c6d9986-kube-api-access-rh7xs" (OuterVolumeSpecName: "kube-api-access-rh7xs") pod "847224a6-d90c-4be4-a4ea-c60e7c6d9986" (UID: "847224a6-d90c-4be4-a4ea-c60e7c6d9986"). InnerVolumeSpecName "kube-api-access-rh7xs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:53:04 crc kubenswrapper[4711]: I0123 08:53:04.688041 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/847224a6-d90c-4be4-a4ea-c60e7c6d9986-scripts" (OuterVolumeSpecName: "scripts") pod "847224a6-d90c-4be4-a4ea-c60e7c6d9986" (UID: "847224a6-d90c-4be4-a4ea-c60e7c6d9986"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:53:04 crc kubenswrapper[4711]: I0123 08:53:04.703757 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/847224a6-d90c-4be4-a4ea-c60e7c6d9986-config-data" (OuterVolumeSpecName: "config-data") pod "847224a6-d90c-4be4-a4ea-c60e7c6d9986" (UID: "847224a6-d90c-4be4-a4ea-c60e7c6d9986"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:53:04 crc kubenswrapper[4711]: I0123 08:53:04.784725 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/847224a6-d90c-4be4-a4ea-c60e7c6d9986-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:04 crc kubenswrapper[4711]: I0123 08:53:04.784762 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/847224a6-d90c-4be4-a4ea-c60e7c6d9986-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:04 crc kubenswrapper[4711]: I0123 08:53:04.784771 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rh7xs\" (UniqueName: \"kubernetes.io/projected/847224a6-d90c-4be4-a4ea-c60e7c6d9986-kube-api-access-rh7xs\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:05 crc kubenswrapper[4711]: I0123 08:53:05.346574 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv" event={"ID":"847224a6-d90c-4be4-a4ea-c60e7c6d9986","Type":"ContainerDied","Data":"6e531ca5b768b62beed1cce0b89929717e31c948f07a3f070d1263055902e670"} Jan 23 08:53:05 crc kubenswrapper[4711]: I0123 08:53:05.347271 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e531ca5b768b62beed1cce0b89929717e31c948f07a3f070d1263055902e670" Jan 23 08:53:05 crc kubenswrapper[4711]: I0123 08:53:05.346791 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv" Jan 23 08:53:05 crc kubenswrapper[4711]: I0123 08:53:05.418753 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:53:05 crc kubenswrapper[4711]: E0123 08:53:05.419057 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="847224a6-d90c-4be4-a4ea-c60e7c6d9986" containerName="nova-kuttl-cell0-conductor-db-sync" Jan 23 08:53:05 crc kubenswrapper[4711]: I0123 08:53:05.419072 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="847224a6-d90c-4be4-a4ea-c60e7c6d9986" containerName="nova-kuttl-cell0-conductor-db-sync" Jan 23 08:53:05 crc kubenswrapper[4711]: I0123 08:53:05.419204 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="847224a6-d90c-4be4-a4ea-c60e7c6d9986" containerName="nova-kuttl-cell0-conductor-db-sync" Jan 23 08:53:05 crc kubenswrapper[4711]: I0123 08:53:05.419730 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:53:05 crc kubenswrapper[4711]: I0123 08:53:05.424294 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-conductor-config-data" Jan 23 08:53:05 crc kubenswrapper[4711]: I0123 08:53:05.429216 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-nova-kuttl-dockercfg-ssqct" Jan 23 08:53:05 crc kubenswrapper[4711]: I0123 08:53:05.432883 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:53:05 crc kubenswrapper[4711]: I0123 08:53:05.496086 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/944ce012-35b1-4e7a-a6ac-8c89d9b8cd1a-config-data\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"944ce012-35b1-4e7a-a6ac-8c89d9b8cd1a\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:53:05 crc kubenswrapper[4711]: I0123 08:53:05.496138 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbzg8\" (UniqueName: \"kubernetes.io/projected/944ce012-35b1-4e7a-a6ac-8c89d9b8cd1a-kube-api-access-nbzg8\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"944ce012-35b1-4e7a-a6ac-8c89d9b8cd1a\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:53:05 crc kubenswrapper[4711]: I0123 08:53:05.597329 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbzg8\" (UniqueName: \"kubernetes.io/projected/944ce012-35b1-4e7a-a6ac-8c89d9b8cd1a-kube-api-access-nbzg8\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"944ce012-35b1-4e7a-a6ac-8c89d9b8cd1a\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:53:05 crc kubenswrapper[4711]: I0123 08:53:05.597517 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/944ce012-35b1-4e7a-a6ac-8c89d9b8cd1a-config-data\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"944ce012-35b1-4e7a-a6ac-8c89d9b8cd1a\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:53:05 crc kubenswrapper[4711]: I0123 08:53:05.601225 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/944ce012-35b1-4e7a-a6ac-8c89d9b8cd1a-config-data\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"944ce012-35b1-4e7a-a6ac-8c89d9b8cd1a\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:53:05 crc kubenswrapper[4711]: I0123 08:53:05.615967 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbzg8\" (UniqueName: \"kubernetes.io/projected/944ce012-35b1-4e7a-a6ac-8c89d9b8cd1a-kube-api-access-nbzg8\") pod \"nova-kuttl-cell0-conductor-0\" (UID: \"944ce012-35b1-4e7a-a6ac-8c89d9b8cd1a\") " pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:53:05 crc kubenswrapper[4711]: I0123 08:53:05.740069 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:53:06 crc kubenswrapper[4711]: I0123 08:53:06.025906 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-0"] Jan 23 08:53:06 crc kubenswrapper[4711]: W0123 08:53:06.032417 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod944ce012_35b1_4e7a_a6ac_8c89d9b8cd1a.slice/crio-ee04c29cd775f7951f7ee4d3a143249606e3c1e7eb0e2b38f9d4325689464b89 WatchSource:0}: Error finding container ee04c29cd775f7951f7ee4d3a143249606e3c1e7eb0e2b38f9d4325689464b89: Status 404 returned error can't find the container with id ee04c29cd775f7951f7ee4d3a143249606e3c1e7eb0e2b38f9d4325689464b89 Jan 23 08:53:06 crc kubenswrapper[4711]: I0123 08:53:06.355718 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" event={"ID":"944ce012-35b1-4e7a-a6ac-8c89d9b8cd1a","Type":"ContainerStarted","Data":"27c38d2e2e4ddcb2f96f6029043e998d11887b2ebf4d2b69b54d778cf357ad58"} Jan 23 08:53:06 crc kubenswrapper[4711]: I0123 08:53:06.356095 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" event={"ID":"944ce012-35b1-4e7a-a6ac-8c89d9b8cd1a","Type":"ContainerStarted","Data":"ee04c29cd775f7951f7ee4d3a143249606e3c1e7eb0e2b38f9d4325689464b89"} Jan 23 08:53:06 crc kubenswrapper[4711]: I0123 08:53:06.356114 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:53:06 crc kubenswrapper[4711]: I0123 08:53:06.372431 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" podStartSLOduration=1.3724159249999999 podStartE2EDuration="1.372415925s" podCreationTimestamp="2026-01-23 08:53:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:53:06.37220535 +0000 UTC m=+1971.945161729" watchObservedRunningTime="2026-01-23 08:53:06.372415925 +0000 UTC m=+1971.945372293" Jan 23 08:53:06 crc kubenswrapper[4711]: I0123 08:53:06.473956 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:53:06 crc kubenswrapper[4711]: E0123 08:53:06.474179 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:53:15 crc kubenswrapper[4711]: I0123 08:53:15.099424 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-znfsx"] Jan 23 08:53:15 crc kubenswrapper[4711]: I0123 08:53:15.101816 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-znfsx" Jan 23 08:53:15 crc kubenswrapper[4711]: I0123 08:53:15.115945 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-znfsx"] Jan 23 08:53:15 crc kubenswrapper[4711]: I0123 08:53:15.173410 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/266031b7-6634-414d-93d5-a4cef3b0f809-utilities\") pod \"certified-operators-znfsx\" (UID: \"266031b7-6634-414d-93d5-a4cef3b0f809\") " pod="openshift-marketplace/certified-operators-znfsx" Jan 23 08:53:15 crc kubenswrapper[4711]: I0123 08:53:15.173480 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kr9wv\" (UniqueName: \"kubernetes.io/projected/266031b7-6634-414d-93d5-a4cef3b0f809-kube-api-access-kr9wv\") pod \"certified-operators-znfsx\" (UID: \"266031b7-6634-414d-93d5-a4cef3b0f809\") " pod="openshift-marketplace/certified-operators-znfsx" Jan 23 08:53:15 crc kubenswrapper[4711]: I0123 08:53:15.173649 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/266031b7-6634-414d-93d5-a4cef3b0f809-catalog-content\") pod \"certified-operators-znfsx\" (UID: \"266031b7-6634-414d-93d5-a4cef3b0f809\") " pod="openshift-marketplace/certified-operators-znfsx" Jan 23 08:53:15 crc kubenswrapper[4711]: I0123 08:53:15.274888 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/266031b7-6634-414d-93d5-a4cef3b0f809-utilities\") pod \"certified-operators-znfsx\" (UID: \"266031b7-6634-414d-93d5-a4cef3b0f809\") " pod="openshift-marketplace/certified-operators-znfsx" Jan 23 08:53:15 crc kubenswrapper[4711]: I0123 08:53:15.274971 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kr9wv\" (UniqueName: \"kubernetes.io/projected/266031b7-6634-414d-93d5-a4cef3b0f809-kube-api-access-kr9wv\") pod \"certified-operators-znfsx\" (UID: \"266031b7-6634-414d-93d5-a4cef3b0f809\") " pod="openshift-marketplace/certified-operators-znfsx" Jan 23 08:53:15 crc kubenswrapper[4711]: I0123 08:53:15.275091 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/266031b7-6634-414d-93d5-a4cef3b0f809-catalog-content\") pod \"certified-operators-znfsx\" (UID: \"266031b7-6634-414d-93d5-a4cef3b0f809\") " pod="openshift-marketplace/certified-operators-znfsx" Jan 23 08:53:15 crc kubenswrapper[4711]: I0123 08:53:15.275556 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/266031b7-6634-414d-93d5-a4cef3b0f809-utilities\") pod \"certified-operators-znfsx\" (UID: \"266031b7-6634-414d-93d5-a4cef3b0f809\") " pod="openshift-marketplace/certified-operators-znfsx" Jan 23 08:53:15 crc kubenswrapper[4711]: I0123 08:53:15.275613 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/266031b7-6634-414d-93d5-a4cef3b0f809-catalog-content\") pod \"certified-operators-znfsx\" (UID: \"266031b7-6634-414d-93d5-a4cef3b0f809\") " pod="openshift-marketplace/certified-operators-znfsx" Jan 23 08:53:15 crc kubenswrapper[4711]: I0123 08:53:15.295011 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-kr9wv\" (UniqueName: \"kubernetes.io/projected/266031b7-6634-414d-93d5-a4cef3b0f809-kube-api-access-kr9wv\") pod \"certified-operators-znfsx\" (UID: \"266031b7-6634-414d-93d5-a4cef3b0f809\") " pod="openshift-marketplace/certified-operators-znfsx" Jan 23 08:53:15 crc kubenswrapper[4711]: I0123 08:53:15.425676 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-znfsx" Jan 23 08:53:15 crc kubenswrapper[4711]: I0123 08:53:15.776187 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell0-conductor-0" Jan 23 08:53:15 crc kubenswrapper[4711]: I0123 08:53:15.953043 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-znfsx"] Jan 23 08:53:15 crc kubenswrapper[4711]: W0123 08:53:15.961191 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod266031b7_6634_414d_93d5_a4cef3b0f809.slice/crio-298f4c28d97392f2784975ff9eccbf157a5a00426d0474de05ea3fedea60cccc WatchSource:0}: Error finding container 298f4c28d97392f2784975ff9eccbf157a5a00426d0474de05ea3fedea60cccc: Status 404 returned error can't find the container with id 298f4c28d97392f2784975ff9eccbf157a5a00426d0474de05ea3fedea60cccc Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.202097 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc"] Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.203605 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.207648 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-manage-config-data" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.210017 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-manage-scripts" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.211403 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc"] Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.308218 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgpr6\" (UniqueName: \"kubernetes.io/projected/4e42940a-1607-4217-b21e-789504a59b2d-kube-api-access-dgpr6\") pod \"nova-kuttl-cell0-cell-mapping-4nfnc\" (UID: \"4e42940a-1607-4217-b21e-789504a59b2d\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.308323 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e42940a-1607-4217-b21e-789504a59b2d-config-data\") pod \"nova-kuttl-cell0-cell-mapping-4nfnc\" (UID: \"4e42940a-1607-4217-b21e-789504a59b2d\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.308359 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e42940a-1607-4217-b21e-789504a59b2d-scripts\") pod \"nova-kuttl-cell0-cell-mapping-4nfnc\" (UID: \"4e42940a-1607-4217-b21e-789504a59b2d\") " 
pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.410203 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgpr6\" (UniqueName: \"kubernetes.io/projected/4e42940a-1607-4217-b21e-789504a59b2d-kube-api-access-dgpr6\") pod \"nova-kuttl-cell0-cell-mapping-4nfnc\" (UID: \"4e42940a-1607-4217-b21e-789504a59b2d\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.410303 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e42940a-1607-4217-b21e-789504a59b2d-config-data\") pod \"nova-kuttl-cell0-cell-mapping-4nfnc\" (UID: \"4e42940a-1607-4217-b21e-789504a59b2d\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.410332 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e42940a-1607-4217-b21e-789504a59b2d-scripts\") pod \"nova-kuttl-cell0-cell-mapping-4nfnc\" (UID: \"4e42940a-1607-4217-b21e-789504a59b2d\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.416258 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e42940a-1607-4217-b21e-789504a59b2d-scripts\") pod \"nova-kuttl-cell0-cell-mapping-4nfnc\" (UID: \"4e42940a-1607-4217-b21e-789504a59b2d\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.419742 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e42940a-1607-4217-b21e-789504a59b2d-config-data\") pod \"nova-kuttl-cell0-cell-mapping-4nfnc\" (UID: \"4e42940a-1607-4217-b21e-789504a59b2d\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.427905 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgpr6\" (UniqueName: \"kubernetes.io/projected/4e42940a-1607-4217-b21e-789504a59b2d-kube-api-access-dgpr6\") pod \"nova-kuttl-cell0-cell-mapping-4nfnc\" (UID: \"4e42940a-1607-4217-b21e-789504a59b2d\") " pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.435926 4711 generic.go:334] "Generic (PLEG): container finished" podID="266031b7-6634-414d-93d5-a4cef3b0f809" containerID="486f98e8c8f05c71fcadf383abfdbd8f6870c17fc496c5c6a5acb5b7cf5959c2" exitCode=0 Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.435982 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-znfsx" event={"ID":"266031b7-6634-414d-93d5-a4cef3b0f809","Type":"ContainerDied","Data":"486f98e8c8f05c71fcadf383abfdbd8f6870c17fc496c5c6a5acb5b7cf5959c2"} Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.436012 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-znfsx" event={"ID":"266031b7-6634-414d-93d5-a4cef3b0f809","Type":"ContainerStarted","Data":"298f4c28d97392f2784975ff9eccbf157a5a00426d0474de05ea3fedea60cccc"} Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.521864 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.525904 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.527095 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.529672 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-api-config-data" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.585194 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.600569 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"] Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.601646 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.608811 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-novncproxy-config-data" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.613357 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbc06f89-f565-4de4-9507-dff08baccc55-config-data\") pod \"nova-kuttl-api-0\" (UID: \"dbc06f89-f565-4de4-9507-dff08baccc55\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.613424 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dbc06f89-f565-4de4-9507-dff08baccc55-logs\") pod \"nova-kuttl-api-0\" (UID: \"dbc06f89-f565-4de4-9507-dff08baccc55\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.613464 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4jqt\" (UniqueName: \"kubernetes.io/projected/dbc06f89-f565-4de4-9507-dff08baccc55-kube-api-access-w4jqt\") pod \"nova-kuttl-api-0\" (UID: \"dbc06f89-f565-4de4-9507-dff08baccc55\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.632743 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"] Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.685820 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.687396 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.694917 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-metadata-config-data" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.719361 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dbc06f89-f565-4de4-9507-dff08baccc55-logs\") pod \"nova-kuttl-api-0\" (UID: \"dbc06f89-f565-4de4-9507-dff08baccc55\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.719442 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4jqt\" (UniqueName: \"kubernetes.io/projected/dbc06f89-f565-4de4-9507-dff08baccc55-kube-api-access-w4jqt\") pod \"nova-kuttl-api-0\" (UID: \"dbc06f89-f565-4de4-9507-dff08baccc55\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.719538 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b9c7342-6111-4e46-8bc7-6edcddd570af-config-data\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"4b9c7342-6111-4e46-8bc7-6edcddd570af\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.719567 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mggnt\" (UniqueName: \"kubernetes.io/projected/4b9c7342-6111-4e46-8bc7-6edcddd570af-kube-api-access-mggnt\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"4b9c7342-6111-4e46-8bc7-6edcddd570af\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.719633 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbc06f89-f565-4de4-9507-dff08baccc55-config-data\") pod \"nova-kuttl-api-0\" (UID: \"dbc06f89-f565-4de4-9507-dff08baccc55\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.720457 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dbc06f89-f565-4de4-9507-dff08baccc55-logs\") pod \"nova-kuttl-api-0\" (UID: \"dbc06f89-f565-4de4-9507-dff08baccc55\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.725399 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbc06f89-f565-4de4-9507-dff08baccc55-config-data\") pod \"nova-kuttl-api-0\" (UID: \"dbc06f89-f565-4de4-9507-dff08baccc55\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.751064 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4jqt\" (UniqueName: \"kubernetes.io/projected/dbc06f89-f565-4de4-9507-dff08baccc55-kube-api-access-w4jqt\") pod \"nova-kuttl-api-0\" (UID: \"dbc06f89-f565-4de4-9507-dff08baccc55\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.752468 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.813599 4711 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.814753 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.822422 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.822550 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgsp4\" (UniqueName: \"kubernetes.io/projected/35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a-kube-api-access-zgsp4\") pod \"nova-kuttl-metadata-0\" (UID: \"35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.822607 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b9c7342-6111-4e46-8bc7-6edcddd570af-config-data\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"4b9c7342-6111-4e46-8bc7-6edcddd570af\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.822639 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mggnt\" (UniqueName: \"kubernetes.io/projected/4b9c7342-6111-4e46-8bc7-6edcddd570af-kube-api-access-mggnt\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"4b9c7342-6111-4e46-8bc7-6edcddd570af\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.822740 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.828741 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-scheduler-config-data" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.832227 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b9c7342-6111-4e46-8bc7-6edcddd570af-config-data\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"4b9c7342-6111-4e46-8bc7-6edcddd570af\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.845175 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.851784 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.853354 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mggnt\" (UniqueName: \"kubernetes.io/projected/4b9c7342-6111-4e46-8bc7-6edcddd570af-kube-api-access-mggnt\") pod \"nova-kuttl-cell1-novncproxy-0\" (UID: \"4b9c7342-6111-4e46-8bc7-6edcddd570af\") " pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.928296 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.928409 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.928448 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdf97\" (UniqueName: \"kubernetes.io/projected/33d0c830-0c71-46e9-8511-81c394e9d0bc-kube-api-access-tdf97\") pod \"nova-kuttl-scheduler-0\" (UID: \"33d0c830-0c71-46e9-8511-81c394e9d0bc\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.928724 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgsp4\" (UniqueName: \"kubernetes.io/projected/35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a-kube-api-access-zgsp4\") pod \"nova-kuttl-metadata-0\" (UID: \"35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.931747 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33d0c830-0c71-46e9-8511-81c394e9d0bc-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"33d0c830-0c71-46e9-8511-81c394e9d0bc\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.932680 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.932691 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.940782 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:16 crc kubenswrapper[4711]: I0123 08:53:16.959644 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgsp4\" (UniqueName: \"kubernetes.io/projected/35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a-kube-api-access-zgsp4\") pod \"nova-kuttl-metadata-0\" (UID: \"35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.014890 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.032740 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdf97\" (UniqueName: \"kubernetes.io/projected/33d0c830-0c71-46e9-8511-81c394e9d0bc-kube-api-access-tdf97\") pod \"nova-kuttl-scheduler-0\" (UID: \"33d0c830-0c71-46e9-8511-81c394e9d0bc\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.032867 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33d0c830-0c71-46e9-8511-81c394e9d0bc-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"33d0c830-0c71-46e9-8511-81c394e9d0bc\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.036586 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33d0c830-0c71-46e9-8511-81c394e9d0bc-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"33d0c830-0c71-46e9-8511-81c394e9d0bc\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.056603 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdf97\" (UniqueName: \"kubernetes.io/projected/33d0c830-0c71-46e9-8511-81c394e9d0bc-kube-api-access-tdf97\") pod \"nova-kuttl-scheduler-0\" (UID: \"33d0c830-0c71-46e9-8511-81c394e9d0bc\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.144472 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc"] Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.155360 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:17 crc kubenswrapper[4711]: W0123 08:53:17.163338 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e42940a_1607_4217_b21e_789504a59b2d.slice/crio-791f18145274fa16b1ed8d6bad7ef61a87aca93a6818e297639ff97bb4bd684c WatchSource:0}: Error finding container 791f18145274fa16b1ed8d6bad7ef61a87aca93a6818e297639ff97bb4bd684c: Status 404 returned error can't find the container with id 791f18145274fa16b1ed8d6bad7ef61a87aca93a6818e297639ff97bb4bd684c Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.360044 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7"] Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.368441 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7" Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.375757 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7"] Jan 23 08:53:17 crc kubenswrapper[4711]: W0123 08:53:17.377980 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddbc06f89_f565_4de4_9507_dff08baccc55.slice/crio-a8e78ea4307fab43cda9746a18fd26e155c5daea703796514cbe9734f0fd24ae WatchSource:0}: Error finding container a8e78ea4307fab43cda9746a18fd26e155c5daea703796514cbe9734f0fd24ae: Status 404 returned error can't find the container with id a8e78ea4307fab43cda9746a18fd26e155c5daea703796514cbe9734f0fd24ae Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.379138 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-conductor-config-data" Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.382608 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-conductor-scripts" Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.398543 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.451939 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"dbc06f89-f565-4de4-9507-dff08baccc55","Type":"ContainerStarted","Data":"a8e78ea4307fab43cda9746a18fd26e155c5daea703796514cbe9734f0fd24ae"} Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.460110 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc" event={"ID":"4e42940a-1607-4217-b21e-789504a59b2d","Type":"ContainerStarted","Data":"dedb0859c4dece08baa08290f8f1a1da1209d40672e72b589923b7e0061358c4"} Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.460173 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc" event={"ID":"4e42940a-1607-4217-b21e-789504a59b2d","Type":"ContainerStarted","Data":"791f18145274fa16b1ed8d6bad7ef61a87aca93a6818e297639ff97bb4bd684c"} Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.469183 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee1bec46-a27c-4fac-b0c7-71eb3671700e-config-data\") pod 
\"nova-kuttl-cell1-conductor-db-sync-82pn7\" (UID: \"ee1bec46-a27c-4fac-b0c7-71eb3671700e\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7" Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.469245 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee1bec46-a27c-4fac-b0c7-71eb3671700e-scripts\") pod \"nova-kuttl-cell1-conductor-db-sync-82pn7\" (UID: \"ee1bec46-a27c-4fac-b0c7-71eb3671700e\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7" Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.469313 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"] Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.469351 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkld9\" (UniqueName: \"kubernetes.io/projected/ee1bec46-a27c-4fac-b0c7-71eb3671700e-kube-api-access-qkld9\") pod \"nova-kuttl-cell1-conductor-db-sync-82pn7\" (UID: \"ee1bec46-a27c-4fac-b0c7-71eb3671700e\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7" Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.484187 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc" podStartSLOduration=1.484166686 podStartE2EDuration="1.484166686s" podCreationTimestamp="2026-01-23 08:53:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:53:17.482491605 +0000 UTC m=+1983.055447973" watchObservedRunningTime="2026-01-23 08:53:17.484166686 +0000 UTC m=+1983.057123054" Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.493868 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-znfsx" event={"ID":"266031b7-6634-414d-93d5-a4cef3b0f809","Type":"ContainerStarted","Data":"7dc2cff1a1b044adae72c97280b3b2f5fe49eb0871a875646f00654462f7137a"} Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.570183 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkld9\" (UniqueName: \"kubernetes.io/projected/ee1bec46-a27c-4fac-b0c7-71eb3671700e-kube-api-access-qkld9\") pod \"nova-kuttl-cell1-conductor-db-sync-82pn7\" (UID: \"ee1bec46-a27c-4fac-b0c7-71eb3671700e\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7" Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.570309 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee1bec46-a27c-4fac-b0c7-71eb3671700e-config-data\") pod \"nova-kuttl-cell1-conductor-db-sync-82pn7\" (UID: \"ee1bec46-a27c-4fac-b0c7-71eb3671700e\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7" Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.570366 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee1bec46-a27c-4fac-b0c7-71eb3671700e-scripts\") pod \"nova-kuttl-cell1-conductor-db-sync-82pn7\" (UID: \"ee1bec46-a27c-4fac-b0c7-71eb3671700e\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7" Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.570911 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 
08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.578355 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee1bec46-a27c-4fac-b0c7-71eb3671700e-config-data\") pod \"nova-kuttl-cell1-conductor-db-sync-82pn7\" (UID: \"ee1bec46-a27c-4fac-b0c7-71eb3671700e\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7" Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.579408 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee1bec46-a27c-4fac-b0c7-71eb3671700e-scripts\") pod \"nova-kuttl-cell1-conductor-db-sync-82pn7\" (UID: \"ee1bec46-a27c-4fac-b0c7-71eb3671700e\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7" Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.592333 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkld9\" (UniqueName: \"kubernetes.io/projected/ee1bec46-a27c-4fac-b0c7-71eb3671700e-kube-api-access-qkld9\") pod \"nova-kuttl-cell1-conductor-db-sync-82pn7\" (UID: \"ee1bec46-a27c-4fac-b0c7-71eb3671700e\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7" Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.686097 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:53:17 crc kubenswrapper[4711]: I0123 08:53:17.707999 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7" Jan 23 08:53:17 crc kubenswrapper[4711]: W0123 08:53:17.721623 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33d0c830_0c71_46e9_8511_81c394e9d0bc.slice/crio-e89f724ace6369a38e588845ceb40d63d9da41ae9e7d519c86db20ce467195b2 WatchSource:0}: Error finding container e89f724ace6369a38e588845ceb40d63d9da41ae9e7d519c86db20ce467195b2: Status 404 returned error can't find the container with id e89f724ace6369a38e588845ceb40d63d9da41ae9e7d519c86db20ce467195b2 Jan 23 08:53:18 crc kubenswrapper[4711]: I0123 08:53:18.129776 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7"] Jan 23 08:53:18 crc kubenswrapper[4711]: W0123 08:53:18.130392 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee1bec46_a27c_4fac_b0c7_71eb3671700e.slice/crio-6f9367995615e3a43c655631d2be17e8d12ce07ccb70c593d5c2b824e5faa11f WatchSource:0}: Error finding container 6f9367995615e3a43c655631d2be17e8d12ce07ccb70c593d5c2b824e5faa11f: Status 404 returned error can't find the container with id 6f9367995615e3a43c655631d2be17e8d12ce07ccb70c593d5c2b824e5faa11f Jan 23 08:53:18 crc kubenswrapper[4711]: I0123 08:53:18.491792 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a","Type":"ContainerStarted","Data":"1d6fd736ca130d659bb06ba7202f574104eb8de09b0b5e55f310777202549bf6"} Jan 23 08:53:18 crc kubenswrapper[4711]: I0123 08:53:18.491852 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a","Type":"ContainerStarted","Data":"9322eabb350a004c4af59ed1376473729300c1c3d5eacc9b7e31aa07d265331f"} Jan 23 08:53:18 crc kubenswrapper[4711]: I0123 
08:53:18.491866 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a","Type":"ContainerStarted","Data":"ca5e6e694168981194d8eb0bf80936eeb4df9a9a6f10819326f4a9263ccf658a"} Jan 23 08:53:18 crc kubenswrapper[4711]: I0123 08:53:18.494368 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"33d0c830-0c71-46e9-8511-81c394e9d0bc","Type":"ContainerStarted","Data":"339c5fea80e4e934890127ae67e3d121b07c7541ba723a2c5b61ff0977ab8331"} Jan 23 08:53:18 crc kubenswrapper[4711]: I0123 08:53:18.494408 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"33d0c830-0c71-46e9-8511-81c394e9d0bc","Type":"ContainerStarted","Data":"e89f724ace6369a38e588845ceb40d63d9da41ae9e7d519c86db20ce467195b2"} Jan 23 08:53:18 crc kubenswrapper[4711]: I0123 08:53:18.496246 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" event={"ID":"4b9c7342-6111-4e46-8bc7-6edcddd570af","Type":"ContainerStarted","Data":"02e1d814d19c7bc98499981249cf0560cf6073082a521a8a69445ccef027f930"} Jan 23 08:53:18 crc kubenswrapper[4711]: I0123 08:53:18.496311 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" event={"ID":"4b9c7342-6111-4e46-8bc7-6edcddd570af","Type":"ContainerStarted","Data":"ff39742b24499c106750cae1569609f76b0309ceb4a4ecae231d1353aa4259c5"} Jan 23 08:53:18 crc kubenswrapper[4711]: I0123 08:53:18.500278 4711 generic.go:334] "Generic (PLEG): container finished" podID="266031b7-6634-414d-93d5-a4cef3b0f809" containerID="7dc2cff1a1b044adae72c97280b3b2f5fe49eb0871a875646f00654462f7137a" exitCode=0 Jan 23 08:53:18 crc kubenswrapper[4711]: I0123 08:53:18.500348 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-znfsx" event={"ID":"266031b7-6634-414d-93d5-a4cef3b0f809","Type":"ContainerDied","Data":"7dc2cff1a1b044adae72c97280b3b2f5fe49eb0871a875646f00654462f7137a"} Jan 23 08:53:18 crc kubenswrapper[4711]: I0123 08:53:18.509809 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"dbc06f89-f565-4de4-9507-dff08baccc55","Type":"ContainerStarted","Data":"4f97040fdf5635beaa32a7cd095b928862d985bb8ad5a8b87bd8326a6d2136c4"} Jan 23 08:53:18 crc kubenswrapper[4711]: I0123 08:53:18.509858 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"dbc06f89-f565-4de4-9507-dff08baccc55","Type":"ContainerStarted","Data":"fbfe850ee2c09e04f55813736ecb2f7fb2cb015d26cabdf54e3b66900154328c"} Jan 23 08:53:18 crc kubenswrapper[4711]: I0123 08:53:18.512918 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7" event={"ID":"ee1bec46-a27c-4fac-b0c7-71eb3671700e","Type":"ContainerStarted","Data":"5b1aa2303b3b35c37b8d8d0638073afef75d1a12c293176098ebc8ee8d5b784b"} Jan 23 08:53:18 crc kubenswrapper[4711]: I0123 08:53:18.512949 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7" event={"ID":"ee1bec46-a27c-4fac-b0c7-71eb3671700e","Type":"ContainerStarted","Data":"6f9367995615e3a43c655631d2be17e8d12ce07ccb70c593d5c2b824e5faa11f"} Jan 23 08:53:18 crc kubenswrapper[4711]: I0123 08:53:18.513981 4711 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="nova-kuttl-default/nova-kuttl-metadata-0" podStartSLOduration=2.513960142 podStartE2EDuration="2.513960142s" podCreationTimestamp="2026-01-23 08:53:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:53:18.508881847 +0000 UTC m=+1984.081838235" watchObservedRunningTime="2026-01-23 08:53:18.513960142 +0000 UTC m=+1984.086916510" Jan 23 08:53:18 crc kubenswrapper[4711]: I0123 08:53:18.529661 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" podStartSLOduration=2.529640666 podStartE2EDuration="2.529640666s" podCreationTimestamp="2026-01-23 08:53:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:53:18.524199342 +0000 UTC m=+1984.097155710" watchObservedRunningTime="2026-01-23 08:53:18.529640666 +0000 UTC m=+1984.102597034" Jan 23 08:53:18 crc kubenswrapper[4711]: I0123 08:53:18.580548 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podStartSLOduration=2.580530102 podStartE2EDuration="2.580530102s" podCreationTimestamp="2026-01-23 08:53:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:53:18.575228372 +0000 UTC m=+1984.148184740" watchObservedRunningTime="2026-01-23 08:53:18.580530102 +0000 UTC m=+1984.153486460" Jan 23 08:53:18 crc kubenswrapper[4711]: I0123 08:53:18.596043 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7" podStartSLOduration=1.596020592 podStartE2EDuration="1.596020592s" podCreationTimestamp="2026-01-23 08:53:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:53:18.589309317 +0000 UTC m=+1984.162265685" watchObservedRunningTime="2026-01-23 08:53:18.596020592 +0000 UTC m=+1984.168976960" Jan 23 08:53:18 crc kubenswrapper[4711]: I0123 08:53:18.616103 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-api-0" podStartSLOduration=2.616080554 podStartE2EDuration="2.616080554s" podCreationTimestamp="2026-01-23 08:53:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:53:18.61429492 +0000 UTC m=+1984.187251288" watchObservedRunningTime="2026-01-23 08:53:18.616080554 +0000 UTC m=+1984.189036922" Jan 23 08:53:19 crc kubenswrapper[4711]: I0123 08:53:19.474385 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:53:19 crc kubenswrapper[4711]: E0123 08:53:19.474772 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 08:53:20 crc kubenswrapper[4711]: I0123 08:53:20.533594 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-znfsx" event={"ID":"266031b7-6634-414d-93d5-a4cef3b0f809","Type":"ContainerStarted","Data":"8a71b5401ccc9c283f33c8bffc2e3a38a71b631805c9f23d174c0b673211c028"} Jan 23 08:53:20 crc kubenswrapper[4711]: I0123 08:53:20.562993 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-znfsx" podStartSLOduration=2.513152047 podStartE2EDuration="5.562971782s" podCreationTimestamp="2026-01-23 08:53:15 +0000 UTC" firstStartedPulling="2026-01-23 08:53:16.437941999 +0000 UTC m=+1982.010898367" lastFinishedPulling="2026-01-23 08:53:19.487761724 +0000 UTC m=+1985.060718102" observedRunningTime="2026-01-23 08:53:20.557000876 +0000 UTC m=+1986.129957254" watchObservedRunningTime="2026-01-23 08:53:20.562971782 +0000 UTC m=+1986.135928160" Jan 23 08:53:21 crc kubenswrapper[4711]: I0123 08:53:21.543609 4711 generic.go:334] "Generic (PLEG): container finished" podID="ee1bec46-a27c-4fac-b0c7-71eb3671700e" containerID="5b1aa2303b3b35c37b8d8d0638073afef75d1a12c293176098ebc8ee8d5b784b" exitCode=0 Jan 23 08:53:21 crc kubenswrapper[4711]: I0123 08:53:21.543697 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7" event={"ID":"ee1bec46-a27c-4fac-b0c7-71eb3671700e","Type":"ContainerDied","Data":"5b1aa2303b3b35c37b8d8d0638073afef75d1a12c293176098ebc8ee8d5b784b"} Jan 23 08:53:21 crc kubenswrapper[4711]: I0123 08:53:21.933353 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0" Jan 23 08:53:22 crc kubenswrapper[4711]: I0123 08:53:22.015745 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:22 crc kubenswrapper[4711]: I0123 08:53:22.015828 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:22 crc kubenswrapper[4711]: I0123 08:53:22.156562 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:22 crc kubenswrapper[4711]: I0123 08:53:22.558095 4711 generic.go:334] "Generic (PLEG): container finished" podID="4e42940a-1607-4217-b21e-789504a59b2d" containerID="dedb0859c4dece08baa08290f8f1a1da1209d40672e72b589923b7e0061358c4" exitCode=0 Jan 23 08:53:22 crc kubenswrapper[4711]: I0123 08:53:22.558185 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc" event={"ID":"4e42940a-1607-4217-b21e-789504a59b2d","Type":"ContainerDied","Data":"dedb0859c4dece08baa08290f8f1a1da1209d40672e72b589923b7e0061358c4"} Jan 23 08:53:22 crc kubenswrapper[4711]: I0123 08:53:22.886302 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:22.999886 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee1bec46-a27c-4fac-b0c7-71eb3671700e-config-data\") pod \"ee1bec46-a27c-4fac-b0c7-71eb3671700e\" (UID: \"ee1bec46-a27c-4fac-b0c7-71eb3671700e\") " Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:22.999959 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkld9\" (UniqueName: \"kubernetes.io/projected/ee1bec46-a27c-4fac-b0c7-71eb3671700e-kube-api-access-qkld9\") pod \"ee1bec46-a27c-4fac-b0c7-71eb3671700e\" (UID: \"ee1bec46-a27c-4fac-b0c7-71eb3671700e\") " Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.000036 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee1bec46-a27c-4fac-b0c7-71eb3671700e-scripts\") pod \"ee1bec46-a27c-4fac-b0c7-71eb3671700e\" (UID: \"ee1bec46-a27c-4fac-b0c7-71eb3671700e\") " Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.009909 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee1bec46-a27c-4fac-b0c7-71eb3671700e-scripts" (OuterVolumeSpecName: "scripts") pod "ee1bec46-a27c-4fac-b0c7-71eb3671700e" (UID: "ee1bec46-a27c-4fac-b0c7-71eb3671700e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.010039 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee1bec46-a27c-4fac-b0c7-71eb3671700e-kube-api-access-qkld9" (OuterVolumeSpecName: "kube-api-access-qkld9") pod "ee1bec46-a27c-4fac-b0c7-71eb3671700e" (UID: "ee1bec46-a27c-4fac-b0c7-71eb3671700e"). InnerVolumeSpecName "kube-api-access-qkld9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.030202 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee1bec46-a27c-4fac-b0c7-71eb3671700e-config-data" (OuterVolumeSpecName: "config-data") pod "ee1bec46-a27c-4fac-b0c7-71eb3671700e" (UID: "ee1bec46-a27c-4fac-b0c7-71eb3671700e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.101596 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee1bec46-a27c-4fac-b0c7-71eb3671700e-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.101634 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee1bec46-a27c-4fac-b0c7-71eb3671700e-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.101647 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkld9\" (UniqueName: \"kubernetes.io/projected/ee1bec46-a27c-4fac-b0c7-71eb3671700e-kube-api-access-qkld9\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.568523 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7" event={"ID":"ee1bec46-a27c-4fac-b0c7-71eb3671700e","Type":"ContainerDied","Data":"6f9367995615e3a43c655631d2be17e8d12ce07ccb70c593d5c2b824e5faa11f"} Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.568568 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f9367995615e3a43c655631d2be17e8d12ce07ccb70c593d5c2b824e5faa11f" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.568664 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.643678 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:53:23 crc kubenswrapper[4711]: E0123 08:53:23.644040 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee1bec46-a27c-4fac-b0c7-71eb3671700e" containerName="nova-kuttl-cell1-conductor-db-sync" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.644237 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee1bec46-a27c-4fac-b0c7-71eb3671700e" containerName="nova-kuttl-cell1-conductor-db-sync" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.644400 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee1bec46-a27c-4fac-b0c7-71eb3671700e" containerName="nova-kuttl-cell1-conductor-db-sync" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.645019 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.650594 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-conductor-config-data" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.659244 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.712783 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/704a67ad-6f29-43f2-b01f-be325aa8cb91-config-data\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"704a67ad-6f29-43f2-b01f-be325aa8cb91\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.712836 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vp6l\" (UniqueName: \"kubernetes.io/projected/704a67ad-6f29-43f2-b01f-be325aa8cb91-kube-api-access-9vp6l\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"704a67ad-6f29-43f2-b01f-be325aa8cb91\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.814572 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/704a67ad-6f29-43f2-b01f-be325aa8cb91-config-data\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"704a67ad-6f29-43f2-b01f-be325aa8cb91\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.814966 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vp6l\" (UniqueName: \"kubernetes.io/projected/704a67ad-6f29-43f2-b01f-be325aa8cb91-kube-api-access-9vp6l\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"704a67ad-6f29-43f2-b01f-be325aa8cb91\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.819782 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/704a67ad-6f29-43f2-b01f-be325aa8cb91-config-data\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"704a67ad-6f29-43f2-b01f-be325aa8cb91\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.830115 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vp6l\" (UniqueName: \"kubernetes.io/projected/704a67ad-6f29-43f2-b01f-be325aa8cb91-kube-api-access-9vp6l\") pod \"nova-kuttl-cell1-conductor-0\" (UID: \"704a67ad-6f29-43f2-b01f-be325aa8cb91\") " pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.892839 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.919199 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgpr6\" (UniqueName: \"kubernetes.io/projected/4e42940a-1607-4217-b21e-789504a59b2d-kube-api-access-dgpr6\") pod \"4e42940a-1607-4217-b21e-789504a59b2d\" (UID: \"4e42940a-1607-4217-b21e-789504a59b2d\") " Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.919522 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e42940a-1607-4217-b21e-789504a59b2d-config-data\") pod \"4e42940a-1607-4217-b21e-789504a59b2d\" (UID: \"4e42940a-1607-4217-b21e-789504a59b2d\") " Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.919577 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e42940a-1607-4217-b21e-789504a59b2d-scripts\") pod \"4e42940a-1607-4217-b21e-789504a59b2d\" (UID: \"4e42940a-1607-4217-b21e-789504a59b2d\") " Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.922557 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e42940a-1607-4217-b21e-789504a59b2d-scripts" (OuterVolumeSpecName: "scripts") pod "4e42940a-1607-4217-b21e-789504a59b2d" (UID: "4e42940a-1607-4217-b21e-789504a59b2d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.937747 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e42940a-1607-4217-b21e-789504a59b2d-kube-api-access-dgpr6" (OuterVolumeSpecName: "kube-api-access-dgpr6") pod "4e42940a-1607-4217-b21e-789504a59b2d" (UID: "4e42940a-1607-4217-b21e-789504a59b2d"). InnerVolumeSpecName "kube-api-access-dgpr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.947916 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e42940a-1607-4217-b21e-789504a59b2d-config-data" (OuterVolumeSpecName: "config-data") pod "4e42940a-1607-4217-b21e-789504a59b2d" (UID: "4e42940a-1607-4217-b21e-789504a59b2d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:53:23 crc kubenswrapper[4711]: I0123 08:53:23.968069 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:53:24 crc kubenswrapper[4711]: I0123 08:53:24.022566 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e42940a-1607-4217-b21e-789504a59b2d-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:24 crc kubenswrapper[4711]: I0123 08:53:24.022659 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e42940a-1607-4217-b21e-789504a59b2d-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:24 crc kubenswrapper[4711]: I0123 08:53:24.022668 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgpr6\" (UniqueName: \"kubernetes.io/projected/4e42940a-1607-4217-b21e-789504a59b2d-kube-api-access-dgpr6\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:24 crc kubenswrapper[4711]: I0123 08:53:24.371276 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-0"] Jan 23 08:53:24 crc kubenswrapper[4711]: W0123 08:53:24.371397 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod704a67ad_6f29_43f2_b01f_be325aa8cb91.slice/crio-61689f1be72007548a4c23308a18953a0470092185cd78cebf4b0e6ab33d3bd9 WatchSource:0}: Error finding container 61689f1be72007548a4c23308a18953a0470092185cd78cebf4b0e6ab33d3bd9: Status 404 returned error can't find the container with id 61689f1be72007548a4c23308a18953a0470092185cd78cebf4b0e6ab33d3bd9 Jan 23 08:53:24 crc kubenswrapper[4711]: I0123 08:53:24.578022 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc" event={"ID":"4e42940a-1607-4217-b21e-789504a59b2d","Type":"ContainerDied","Data":"791f18145274fa16b1ed8d6bad7ef61a87aca93a6818e297639ff97bb4bd684c"} Jan 23 08:53:24 crc kubenswrapper[4711]: I0123 08:53:24.578046 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc" Jan 23 08:53:24 crc kubenswrapper[4711]: I0123 08:53:24.578066 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="791f18145274fa16b1ed8d6bad7ef61a87aca93a6818e297639ff97bb4bd684c" Jan 23 08:53:24 crc kubenswrapper[4711]: I0123 08:53:24.579361 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" event={"ID":"704a67ad-6f29-43f2-b01f-be325aa8cb91","Type":"ContainerStarted","Data":"3ac731a288bfb594cb194a083112b0af6e25d245c603083df73c2263fe52b2b6"} Jan 23 08:53:24 crc kubenswrapper[4711]: I0123 08:53:24.579390 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" event={"ID":"704a67ad-6f29-43f2-b01f-be325aa8cb91","Type":"ContainerStarted","Data":"61689f1be72007548a4c23308a18953a0470092185cd78cebf4b0e6ab33d3bd9"} Jan 23 08:53:24 crc kubenswrapper[4711]: I0123 08:53:24.579528 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" Jan 23 08:53:24 crc kubenswrapper[4711]: I0123 08:53:24.604702 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0" podStartSLOduration=1.6046791630000001 podStartE2EDuration="1.604679163s" podCreationTimestamp="2026-01-23 08:53:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:53:24.593432018 +0000 UTC m=+1990.166388406" watchObservedRunningTime="2026-01-23 08:53:24.604679163 +0000 UTC m=+1990.177635531" Jan 23 08:53:24 crc kubenswrapper[4711]: I0123 08:53:24.762296 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:53:24 crc kubenswrapper[4711]: I0123 08:53:24.762579 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="dbc06f89-f565-4de4-9507-dff08baccc55" containerName="nova-kuttl-api-log" containerID="cri-o://fbfe850ee2c09e04f55813736ecb2f7fb2cb015d26cabdf54e3b66900154328c" gracePeriod=30 Jan 23 08:53:24 crc kubenswrapper[4711]: I0123 08:53:24.762724 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="dbc06f89-f565-4de4-9507-dff08baccc55" containerName="nova-kuttl-api-api" containerID="cri-o://4f97040fdf5635beaa32a7cd095b928862d985bb8ad5a8b87bd8326a6d2136c4" gracePeriod=30 Jan 23 08:53:24 crc kubenswrapper[4711]: I0123 08:53:24.777672 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:53:24 crc kubenswrapper[4711]: I0123 08:53:24.777906 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podUID="33d0c830-0c71-46e9-8511-81c394e9d0bc" containerName="nova-kuttl-scheduler-scheduler" containerID="cri-o://339c5fea80e4e934890127ae67e3d121b07c7541ba723a2c5b61ff0977ab8331" gracePeriod=30 Jan 23 08:53:24 crc kubenswrapper[4711]: I0123 08:53:24.804287 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:53:24 crc kubenswrapper[4711]: I0123 08:53:24.804565 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-0" 
podUID="35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a" containerName="nova-kuttl-metadata-log" containerID="cri-o://9322eabb350a004c4af59ed1376473729300c1c3d5eacc9b7e31aa07d265331f" gracePeriod=30 Jan 23 08:53:24 crc kubenswrapper[4711]: I0123 08:53:24.804692 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a" containerName="nova-kuttl-metadata-metadata" containerID="cri-o://1d6fd736ca130d659bb06ba7202f574104eb8de09b0b5e55f310777202549bf6" gracePeriod=30 Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.270005 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.342942 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4jqt\" (UniqueName: \"kubernetes.io/projected/dbc06f89-f565-4de4-9507-dff08baccc55-kube-api-access-w4jqt\") pod \"dbc06f89-f565-4de4-9507-dff08baccc55\" (UID: \"dbc06f89-f565-4de4-9507-dff08baccc55\") " Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.343015 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dbc06f89-f565-4de4-9507-dff08baccc55-logs\") pod \"dbc06f89-f565-4de4-9507-dff08baccc55\" (UID: \"dbc06f89-f565-4de4-9507-dff08baccc55\") " Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.343125 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbc06f89-f565-4de4-9507-dff08baccc55-config-data\") pod \"dbc06f89-f565-4de4-9507-dff08baccc55\" (UID: \"dbc06f89-f565-4de4-9507-dff08baccc55\") " Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.343517 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbc06f89-f565-4de4-9507-dff08baccc55-logs" (OuterVolumeSpecName: "logs") pod "dbc06f89-f565-4de4-9507-dff08baccc55" (UID: "dbc06f89-f565-4de4-9507-dff08baccc55"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.349653 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbc06f89-f565-4de4-9507-dff08baccc55-kube-api-access-w4jqt" (OuterVolumeSpecName: "kube-api-access-w4jqt") pod "dbc06f89-f565-4de4-9507-dff08baccc55" (UID: "dbc06f89-f565-4de4-9507-dff08baccc55"). InnerVolumeSpecName "kube-api-access-w4jqt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.349995 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.364500 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbc06f89-f565-4de4-9507-dff08baccc55-config-data" (OuterVolumeSpecName: "config-data") pod "dbc06f89-f565-4de4-9507-dff08baccc55" (UID: "dbc06f89-f565-4de4-9507-dff08baccc55"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.426359 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-znfsx" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.426424 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-znfsx" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.444489 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a-logs\") pod \"35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a\" (UID: \"35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a\") " Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.444589 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgsp4\" (UniqueName: \"kubernetes.io/projected/35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a-kube-api-access-zgsp4\") pod \"35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a\" (UID: \"35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a\") " Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.444724 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a-config-data\") pod \"35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a\" (UID: \"35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a\") " Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.444918 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a-logs" (OuterVolumeSpecName: "logs") pod "35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a" (UID: "35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.445079 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.445097 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4jqt\" (UniqueName: \"kubernetes.io/projected/dbc06f89-f565-4de4-9507-dff08baccc55-kube-api-access-w4jqt\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.445109 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dbc06f89-f565-4de4-9507-dff08baccc55-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.445117 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbc06f89-f565-4de4-9507-dff08baccc55-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.448103 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a-kube-api-access-zgsp4" (OuterVolumeSpecName: "kube-api-access-zgsp4") pod "35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a" (UID: "35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a"). InnerVolumeSpecName "kube-api-access-zgsp4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.465066 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a-config-data" (OuterVolumeSpecName: "config-data") pod "35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a" (UID: "35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.469018 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-znfsx" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.546210 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgsp4\" (UniqueName: \"kubernetes.io/projected/35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a-kube-api-access-zgsp4\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.546249 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.588331 4711 generic.go:334] "Generic (PLEG): container finished" podID="35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a" containerID="1d6fd736ca130d659bb06ba7202f574104eb8de09b0b5e55f310777202549bf6" exitCode=0 Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.588621 4711 generic.go:334] "Generic (PLEG): container finished" podID="35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a" containerID="9322eabb350a004c4af59ed1376473729300c1c3d5eacc9b7e31aa07d265331f" exitCode=143 Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.588657 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a","Type":"ContainerDied","Data":"1d6fd736ca130d659bb06ba7202f574104eb8de09b0b5e55f310777202549bf6"} Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.588680 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a","Type":"ContainerDied","Data":"9322eabb350a004c4af59ed1376473729300c1c3d5eacc9b7e31aa07d265331f"} Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.588690 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a","Type":"ContainerDied","Data":"ca5e6e694168981194d8eb0bf80936eeb4df9a9a6f10819326f4a9263ccf658a"} Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.588704 4711 scope.go:117] "RemoveContainer" containerID="1d6fd736ca130d659bb06ba7202f574104eb8de09b0b5e55f310777202549bf6" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.588797 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.592127 4711 generic.go:334] "Generic (PLEG): container finished" podID="dbc06f89-f565-4de4-9507-dff08baccc55" containerID="4f97040fdf5635beaa32a7cd095b928862d985bb8ad5a8b87bd8326a6d2136c4" exitCode=0 Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.592143 4711 generic.go:334] "Generic (PLEG): container finished" podID="dbc06f89-f565-4de4-9507-dff08baccc55" containerID="fbfe850ee2c09e04f55813736ecb2f7fb2cb015d26cabdf54e3b66900154328c" exitCode=143 Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.592636 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.592711 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"dbc06f89-f565-4de4-9507-dff08baccc55","Type":"ContainerDied","Data":"4f97040fdf5635beaa32a7cd095b928862d985bb8ad5a8b87bd8326a6d2136c4"} Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.592889 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"dbc06f89-f565-4de4-9507-dff08baccc55","Type":"ContainerDied","Data":"fbfe850ee2c09e04f55813736ecb2f7fb2cb015d26cabdf54e3b66900154328c"} Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.592907 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"dbc06f89-f565-4de4-9507-dff08baccc55","Type":"ContainerDied","Data":"a8e78ea4307fab43cda9746a18fd26e155c5daea703796514cbe9734f0fd24ae"} Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.611946 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.629003 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.638891 4711 scope.go:117] "RemoveContainer" containerID="9322eabb350a004c4af59ed1376473729300c1c3d5eacc9b7e31aa07d265331f" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.640557 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.662305 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.668345 4711 scope.go:117] "RemoveContainer" containerID="1d6fd736ca130d659bb06ba7202f574104eb8de09b0b5e55f310777202549bf6" Jan 23 08:53:25 crc kubenswrapper[4711]: E0123 08:53:25.669044 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d6fd736ca130d659bb06ba7202f574104eb8de09b0b5e55f310777202549bf6\": container with ID starting with 1d6fd736ca130d659bb06ba7202f574104eb8de09b0b5e55f310777202549bf6 not found: ID does not exist" containerID="1d6fd736ca130d659bb06ba7202f574104eb8de09b0b5e55f310777202549bf6" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.669096 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d6fd736ca130d659bb06ba7202f574104eb8de09b0b5e55f310777202549bf6"} err="failed to get container status \"1d6fd736ca130d659bb06ba7202f574104eb8de09b0b5e55f310777202549bf6\": rpc error: code = NotFound desc = could 
not find container \"1d6fd736ca130d659bb06ba7202f574104eb8de09b0b5e55f310777202549bf6\": container with ID starting with 1d6fd736ca130d659bb06ba7202f574104eb8de09b0b5e55f310777202549bf6 not found: ID does not exist" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.669127 4711 scope.go:117] "RemoveContainer" containerID="9322eabb350a004c4af59ed1376473729300c1c3d5eacc9b7e31aa07d265331f" Jan 23 08:53:25 crc kubenswrapper[4711]: E0123 08:53:25.669679 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9322eabb350a004c4af59ed1376473729300c1c3d5eacc9b7e31aa07d265331f\": container with ID starting with 9322eabb350a004c4af59ed1376473729300c1c3d5eacc9b7e31aa07d265331f not found: ID does not exist" containerID="9322eabb350a004c4af59ed1376473729300c1c3d5eacc9b7e31aa07d265331f" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.669723 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9322eabb350a004c4af59ed1376473729300c1c3d5eacc9b7e31aa07d265331f"} err="failed to get container status \"9322eabb350a004c4af59ed1376473729300c1c3d5eacc9b7e31aa07d265331f\": rpc error: code = NotFound desc = could not find container \"9322eabb350a004c4af59ed1376473729300c1c3d5eacc9b7e31aa07d265331f\": container with ID starting with 9322eabb350a004c4af59ed1376473729300c1c3d5eacc9b7e31aa07d265331f not found: ID does not exist" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.669760 4711 scope.go:117] "RemoveContainer" containerID="1d6fd736ca130d659bb06ba7202f574104eb8de09b0b5e55f310777202549bf6" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.670062 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d6fd736ca130d659bb06ba7202f574104eb8de09b0b5e55f310777202549bf6"} err="failed to get container status \"1d6fd736ca130d659bb06ba7202f574104eb8de09b0b5e55f310777202549bf6\": rpc error: code = NotFound desc = could not find container \"1d6fd736ca130d659bb06ba7202f574104eb8de09b0b5e55f310777202549bf6\": container with ID starting with 1d6fd736ca130d659bb06ba7202f574104eb8de09b0b5e55f310777202549bf6 not found: ID does not exist" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.670083 4711 scope.go:117] "RemoveContainer" containerID="9322eabb350a004c4af59ed1376473729300c1c3d5eacc9b7e31aa07d265331f" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.670882 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9322eabb350a004c4af59ed1376473729300c1c3d5eacc9b7e31aa07d265331f"} err="failed to get container status \"9322eabb350a004c4af59ed1376473729300c1c3d5eacc9b7e31aa07d265331f\": rpc error: code = NotFound desc = could not find container \"9322eabb350a004c4af59ed1376473729300c1c3d5eacc9b7e31aa07d265331f\": container with ID starting with 9322eabb350a004c4af59ed1376473729300c1c3d5eacc9b7e31aa07d265331f not found: ID does not exist" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.670921 4711 scope.go:117] "RemoveContainer" containerID="4f97040fdf5635beaa32a7cd095b928862d985bb8ad5a8b87bd8326a6d2136c4" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.672963 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-znfsx" Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.685446 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:53:25 crc 
Jan 23 08:53:25 crc kubenswrapper[4711]: E0123 08:53:25.686040 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a" containerName="nova-kuttl-metadata-log"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.686060 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a" containerName="nova-kuttl-metadata-log"
Jan 23 08:53:25 crc kubenswrapper[4711]: E0123 08:53:25.686073 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a" containerName="nova-kuttl-metadata-metadata"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.686080 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a" containerName="nova-kuttl-metadata-metadata"
Jan 23 08:53:25 crc kubenswrapper[4711]: E0123 08:53:25.686115 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbc06f89-f565-4de4-9507-dff08baccc55" containerName="nova-kuttl-api-log"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.686122 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbc06f89-f565-4de4-9507-dff08baccc55" containerName="nova-kuttl-api-log"
Jan 23 08:53:25 crc kubenswrapper[4711]: E0123 08:53:25.686135 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbc06f89-f565-4de4-9507-dff08baccc55" containerName="nova-kuttl-api-api"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.686141 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbc06f89-f565-4de4-9507-dff08baccc55" containerName="nova-kuttl-api-api"
Jan 23 08:53:25 crc kubenswrapper[4711]: E0123 08:53:25.686155 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e42940a-1607-4217-b21e-789504a59b2d" containerName="nova-manage"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.686161 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e42940a-1607-4217-b21e-789504a59b2d" containerName="nova-manage"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.686363 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a" containerName="nova-kuttl-metadata-metadata"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.686378 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbc06f89-f565-4de4-9507-dff08baccc55" containerName="nova-kuttl-api-log"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.686388 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e42940a-1607-4217-b21e-789504a59b2d" containerName="nova-manage"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.686399 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbc06f89-f565-4de4-9507-dff08baccc55" containerName="nova-kuttl-api-api"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.686427 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a" containerName="nova-kuttl-metadata-log"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.687383 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.689389 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-metadata-config-data"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.700077 4711 scope.go:117] "RemoveContainer" containerID="fbfe850ee2c09e04f55813736ecb2f7fb2cb015d26cabdf54e3b66900154328c"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.712161 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"]
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.718962 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"]
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.720485 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.722699 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-api-config-data"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.723042 4711 scope.go:117] "RemoveContainer" containerID="4f97040fdf5635beaa32a7cd095b928862d985bb8ad5a8b87bd8326a6d2136c4"
Jan 23 08:53:25 crc kubenswrapper[4711]: E0123 08:53:25.723381 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f97040fdf5635beaa32a7cd095b928862d985bb8ad5a8b87bd8326a6d2136c4\": container with ID starting with 4f97040fdf5635beaa32a7cd095b928862d985bb8ad5a8b87bd8326a6d2136c4 not found: ID does not exist" containerID="4f97040fdf5635beaa32a7cd095b928862d985bb8ad5a8b87bd8326a6d2136c4"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.723407 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f97040fdf5635beaa32a7cd095b928862d985bb8ad5a8b87bd8326a6d2136c4"} err="failed to get container status \"4f97040fdf5635beaa32a7cd095b928862d985bb8ad5a8b87bd8326a6d2136c4\": rpc error: code = NotFound desc = could not find container \"4f97040fdf5635beaa32a7cd095b928862d985bb8ad5a8b87bd8326a6d2136c4\": container with ID starting with 4f97040fdf5635beaa32a7cd095b928862d985bb8ad5a8b87bd8326a6d2136c4 not found: ID does not exist"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.723427 4711 scope.go:117] "RemoveContainer" containerID="fbfe850ee2c09e04f55813736ecb2f7fb2cb015d26cabdf54e3b66900154328c"
Jan 23 08:53:25 crc kubenswrapper[4711]: E0123 08:53:25.723739 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbfe850ee2c09e04f55813736ecb2f7fb2cb015d26cabdf54e3b66900154328c\": container with ID starting with fbfe850ee2c09e04f55813736ecb2f7fb2cb015d26cabdf54e3b66900154328c not found: ID does not exist" containerID="fbfe850ee2c09e04f55813736ecb2f7fb2cb015d26cabdf54e3b66900154328c"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.723763 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbfe850ee2c09e04f55813736ecb2f7fb2cb015d26cabdf54e3b66900154328c"} err="failed to get container status \"fbfe850ee2c09e04f55813736ecb2f7fb2cb015d26cabdf54e3b66900154328c\": rpc error: code = NotFound desc = could not find container \"fbfe850ee2c09e04f55813736ecb2f7fb2cb015d26cabdf54e3b66900154328c\": container with ID starting with fbfe850ee2c09e04f55813736ecb2f7fb2cb015d26cabdf54e3b66900154328c not found: ID does not exist"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.723779 4711 scope.go:117] "RemoveContainer" containerID="4f97040fdf5635beaa32a7cd095b928862d985bb8ad5a8b87bd8326a6d2136c4"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.724078 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f97040fdf5635beaa32a7cd095b928862d985bb8ad5a8b87bd8326a6d2136c4"} err="failed to get container status \"4f97040fdf5635beaa32a7cd095b928862d985bb8ad5a8b87bd8326a6d2136c4\": rpc error: code = NotFound desc = could not find container \"4f97040fdf5635beaa32a7cd095b928862d985bb8ad5a8b87bd8326a6d2136c4\": container with ID starting with 4f97040fdf5635beaa32a7cd095b928862d985bb8ad5a8b87bd8326a6d2136c4 not found: ID does not exist"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.724133 4711 scope.go:117] "RemoveContainer" containerID="fbfe850ee2c09e04f55813736ecb2f7fb2cb015d26cabdf54e3b66900154328c"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.724643 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbfe850ee2c09e04f55813736ecb2f7fb2cb015d26cabdf54e3b66900154328c"} err="failed to get container status \"fbfe850ee2c09e04f55813736ecb2f7fb2cb015d26cabdf54e3b66900154328c\": rpc error: code = NotFound desc = could not find container \"fbfe850ee2c09e04f55813736ecb2f7fb2cb015d26cabdf54e3b66900154328c\": container with ID starting with fbfe850ee2c09e04f55813736ecb2f7fb2cb015d26cabdf54e3b66900154328c not found: ID does not exist"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.736672 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"]
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.755902 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-znfsx"]
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.760934 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a2adbe0-194b-4043-9f43-2257a5ff97b2-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"6a2adbe0-194b-4043-9f43-2257a5ff97b2\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.760982 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/82af09e6-75b7-4ea4-8718-14ecbb9e2a36-logs\") pod \"nova-kuttl-api-0\" (UID: \"82af09e6-75b7-4ea4-8718-14ecbb9e2a36\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.761010 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfbrs\" (UniqueName: \"kubernetes.io/projected/6a2adbe0-194b-4043-9f43-2257a5ff97b2-kube-api-access-rfbrs\") pod \"nova-kuttl-metadata-0\" (UID: \"6a2adbe0-194b-4043-9f43-2257a5ff97b2\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.761032 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82af09e6-75b7-4ea4-8718-14ecbb9e2a36-config-data\") pod \"nova-kuttl-api-0\" (UID: \"82af09e6-75b7-4ea4-8718-14ecbb9e2a36\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.761153 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pd94f\" (UniqueName: \"kubernetes.io/projected/82af09e6-75b7-4ea4-8718-14ecbb9e2a36-kube-api-access-pd94f\") pod \"nova-kuttl-api-0\" (UID: \"82af09e6-75b7-4ea4-8718-14ecbb9e2a36\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.761364 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a2adbe0-194b-4043-9f43-2257a5ff97b2-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"6a2adbe0-194b-4043-9f43-2257a5ff97b2\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.863038 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a2adbe0-194b-4043-9f43-2257a5ff97b2-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"6a2adbe0-194b-4043-9f43-2257a5ff97b2\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.863136 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a2adbe0-194b-4043-9f43-2257a5ff97b2-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"6a2adbe0-194b-4043-9f43-2257a5ff97b2\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.863165 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/82af09e6-75b7-4ea4-8718-14ecbb9e2a36-logs\") pod \"nova-kuttl-api-0\" (UID: \"82af09e6-75b7-4ea4-8718-14ecbb9e2a36\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.863184 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfbrs\" (UniqueName: \"kubernetes.io/projected/6a2adbe0-194b-4043-9f43-2257a5ff97b2-kube-api-access-rfbrs\") pod \"nova-kuttl-metadata-0\" (UID: \"6a2adbe0-194b-4043-9f43-2257a5ff97b2\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.863200 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82af09e6-75b7-4ea4-8718-14ecbb9e2a36-config-data\") pod \"nova-kuttl-api-0\" (UID: \"82af09e6-75b7-4ea4-8718-14ecbb9e2a36\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.863230 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pd94f\" (UniqueName: \"kubernetes.io/projected/82af09e6-75b7-4ea4-8718-14ecbb9e2a36-kube-api-access-pd94f\") pod \"nova-kuttl-api-0\" (UID: \"82af09e6-75b7-4ea4-8718-14ecbb9e2a36\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.863541 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a2adbe0-194b-4043-9f43-2257a5ff97b2-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"6a2adbe0-194b-4043-9f43-2257a5ff97b2\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.863595 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/82af09e6-75b7-4ea4-8718-14ecbb9e2a36-logs\") pod \"nova-kuttl-api-0\" (UID: \"82af09e6-75b7-4ea4-8718-14ecbb9e2a36\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.868407 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82af09e6-75b7-4ea4-8718-14ecbb9e2a36-config-data\") pod \"nova-kuttl-api-0\" (UID: \"82af09e6-75b7-4ea4-8718-14ecbb9e2a36\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.878947 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a2adbe0-194b-4043-9f43-2257a5ff97b2-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"6a2adbe0-194b-4043-9f43-2257a5ff97b2\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.886084 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pd94f\" (UniqueName: \"kubernetes.io/projected/82af09e6-75b7-4ea4-8718-14ecbb9e2a36-kube-api-access-pd94f\") pod \"nova-kuttl-api-0\" (UID: \"82af09e6-75b7-4ea4-8718-14ecbb9e2a36\") " pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:53:25 crc kubenswrapper[4711]: I0123 08:53:25.888496 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfbrs\" (UniqueName: \"kubernetes.io/projected/6a2adbe0-194b-4043-9f43-2257a5ff97b2-kube-api-access-rfbrs\") pod \"nova-kuttl-metadata-0\" (UID: \"6a2adbe0-194b-4043-9f43-2257a5ff97b2\") " pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:53:26 crc kubenswrapper[4711]: I0123 08:53:26.009707 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:53:26 crc kubenswrapper[4711]: I0123 08:53:26.036308 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:53:26 crc kubenswrapper[4711]: W0123 08:53:26.452867 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a2adbe0_194b_4043_9f43_2257a5ff97b2.slice/crio-a562458583ba32f23b32ca547bdc30dd3977202e573417d695edefeae4dbf2a2 WatchSource:0}: Error finding container a562458583ba32f23b32ca547bdc30dd3977202e573417d695edefeae4dbf2a2: Status 404 returned error can't find the container with id a562458583ba32f23b32ca547bdc30dd3977202e573417d695edefeae4dbf2a2
Jan 23 08:53:26 crc kubenswrapper[4711]: I0123 08:53:26.452925 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"]
Jan 23 08:53:26 crc kubenswrapper[4711]: I0123 08:53:26.514828 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"]
Jan 23 08:53:26 crc kubenswrapper[4711]: I0123 08:53:26.607192 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"6a2adbe0-194b-4043-9f43-2257a5ff97b2","Type":"ContainerStarted","Data":"3c7cba2c175fe4f9e23b25442a85380140f0b3ef26e1006ff54deccd55c6b277"}
Jan 23 08:53:26 crc kubenswrapper[4711]: I0123 08:53:26.607227 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"6a2adbe0-194b-4043-9f43-2257a5ff97b2","Type":"ContainerStarted","Data":"a562458583ba32f23b32ca547bdc30dd3977202e573417d695edefeae4dbf2a2"}
Jan 23 08:53:26 crc kubenswrapper[4711]: I0123 08:53:26.613627 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"82af09e6-75b7-4ea4-8718-14ecbb9e2a36","Type":"ContainerStarted","Data":"6cb86f7e935fc4a319b827651d8401a7e89f5a6176aca1a51b8d3d365e0f3351"}
Jan 23 08:53:26 crc kubenswrapper[4711]: I0123 08:53:26.933059 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"
Jan 23 08:53:26 crc kubenswrapper[4711]: I0123 08:53:26.946642 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"
Jan 23 08:53:27 crc kubenswrapper[4711]: I0123 08:53:27.495754 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a" path="/var/lib/kubelet/pods/35b0d6e1-6bd7-44ce-a31c-a4b2a95ec20a/volumes"
Jan 23 08:53:27 crc kubenswrapper[4711]: I0123 08:53:27.497029 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbc06f89-f565-4de4-9507-dff08baccc55" path="/var/lib/kubelet/pods/dbc06f89-f565-4de4-9507-dff08baccc55/volumes"
Jan 23 08:53:27 crc kubenswrapper[4711]: I0123 08:53:27.624286 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"82af09e6-75b7-4ea4-8718-14ecbb9e2a36","Type":"ContainerStarted","Data":"22eef9cca044c169349674b9c7044ace7f90f4be0fe2a249c13006d7bf95fec2"}
Jan 23 08:53:27 crc kubenswrapper[4711]: I0123 08:53:27.624331 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"82af09e6-75b7-4ea4-8718-14ecbb9e2a36","Type":"ContainerStarted","Data":"5f70f6258d1f8a2f1242aa164aca73d22e74438945deae948dfd9f7e99e4ae4e"}
Jan 23 08:53:27 crc kubenswrapper[4711]: I0123 08:53:27.629681 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"6a2adbe0-194b-4043-9f43-2257a5ff97b2","Type":"ContainerStarted","Data":"8ec87f3a26018f9fe8cb3bbdf2b848541cd26b101a2159625175a70d0822ea59"}
Jan 23 08:53:27 crc kubenswrapper[4711]: I0123 08:53:27.629829 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-znfsx" podUID="266031b7-6634-414d-93d5-a4cef3b0f809" containerName="registry-server" containerID="cri-o://8a71b5401ccc9c283f33c8bffc2e3a38a71b631805c9f23d174c0b673211c028" gracePeriod=2
Jan 23 08:53:27 crc kubenswrapper[4711]: I0123 08:53:27.639772 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell1-novncproxy-0"
Jan 23 08:53:27 crc kubenswrapper[4711]: I0123 08:53:27.650016 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-api-0" podStartSLOduration=2.6497436519999997 podStartE2EDuration="2.649743652s" podCreationTimestamp="2026-01-23 08:53:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:53:27.64069369 +0000 UTC m=+1993.213650058" watchObservedRunningTime="2026-01-23 08:53:27.649743652 +0000 UTC m=+1993.222700020"
Jan 23 08:53:27 crc kubenswrapper[4711]: I0123 08:53:27.701290 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-metadata-0" podStartSLOduration=2.7012623639999997 podStartE2EDuration="2.701262364s" podCreationTimestamp="2026-01-23 08:53:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:53:27.690944561 +0000 UTC m=+1993.263900929" watchObservedRunningTime="2026-01-23 08:53:27.701262364 +0000 UTC m=+1993.274218742"
Jan 23 08:53:28 crc kubenswrapper[4711]: I0123 08:53:28.640222 4711 generic.go:334] "Generic (PLEG): container finished" podID="266031b7-6634-414d-93d5-a4cef3b0f809" containerID="8a71b5401ccc9c283f33c8bffc2e3a38a71b631805c9f23d174c0b673211c028" exitCode=0
Jan 23 08:53:28 crc kubenswrapper[4711]: I0123 08:53:28.640381 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-znfsx" event={"ID":"266031b7-6634-414d-93d5-a4cef3b0f809","Type":"ContainerDied","Data":"8a71b5401ccc9c283f33c8bffc2e3a38a71b631805c9f23d174c0b673211c028"}
Jan 23 08:53:29 crc kubenswrapper[4711]: I0123 08:53:29.592782 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-znfsx"
Jan 23 08:53:29 crc kubenswrapper[4711]: I0123 08:53:29.648329 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kr9wv\" (UniqueName: \"kubernetes.io/projected/266031b7-6634-414d-93d5-a4cef3b0f809-kube-api-access-kr9wv\") pod \"266031b7-6634-414d-93d5-a4cef3b0f809\" (UID: \"266031b7-6634-414d-93d5-a4cef3b0f809\") "
Jan 23 08:53:29 crc kubenswrapper[4711]: I0123 08:53:29.648371 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/266031b7-6634-414d-93d5-a4cef3b0f809-utilities\") pod \"266031b7-6634-414d-93d5-a4cef3b0f809\" (UID: \"266031b7-6634-414d-93d5-a4cef3b0f809\") "
Jan 23 08:53:29 crc kubenswrapper[4711]: I0123 08:53:29.648405 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/266031b7-6634-414d-93d5-a4cef3b0f809-catalog-content\") pod \"266031b7-6634-414d-93d5-a4cef3b0f809\" (UID: \"266031b7-6634-414d-93d5-a4cef3b0f809\") "
Jan 23 08:53:29 crc kubenswrapper[4711]: I0123 08:53:29.650460 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/266031b7-6634-414d-93d5-a4cef3b0f809-utilities" (OuterVolumeSpecName: "utilities") pod "266031b7-6634-414d-93d5-a4cef3b0f809" (UID: "266031b7-6634-414d-93d5-a4cef3b0f809"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:53:29 crc kubenswrapper[4711]: I0123 08:53:29.654061 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-znfsx" event={"ID":"266031b7-6634-414d-93d5-a4cef3b0f809","Type":"ContainerDied","Data":"298f4c28d97392f2784975ff9eccbf157a5a00426d0474de05ea3fedea60cccc"}
Jan 23 08:53:29 crc kubenswrapper[4711]: I0123 08:53:29.654128 4711 scope.go:117] "RemoveContainer" containerID="8a71b5401ccc9c283f33c8bffc2e3a38a71b631805c9f23d174c0b673211c028"
Jan 23 08:53:29 crc kubenswrapper[4711]: I0123 08:53:29.654231 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-znfsx"
Jan 23 08:53:29 crc kubenswrapper[4711]: I0123 08:53:29.655998 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/266031b7-6634-414d-93d5-a4cef3b0f809-kube-api-access-kr9wv" (OuterVolumeSpecName: "kube-api-access-kr9wv") pod "266031b7-6634-414d-93d5-a4cef3b0f809" (UID: "266031b7-6634-414d-93d5-a4cef3b0f809"). InnerVolumeSpecName "kube-api-access-kr9wv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:53:29 crc kubenswrapper[4711]: I0123 08:53:29.707386 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/266031b7-6634-414d-93d5-a4cef3b0f809-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "266031b7-6634-414d-93d5-a4cef3b0f809" (UID: "266031b7-6634-414d-93d5-a4cef3b0f809"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 08:53:29 crc kubenswrapper[4711]: I0123 08:53:29.711129 4711 scope.go:117] "RemoveContainer" containerID="7dc2cff1a1b044adae72c97280b3b2f5fe49eb0871a875646f00654462f7137a"
Jan 23 08:53:29 crc kubenswrapper[4711]: I0123 08:53:29.738932 4711 scope.go:117] "RemoveContainer" containerID="486f98e8c8f05c71fcadf383abfdbd8f6870c17fc496c5c6a5acb5b7cf5959c2"
Jan 23 08:53:29 crc kubenswrapper[4711]: I0123 08:53:29.750620 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kr9wv\" (UniqueName: \"kubernetes.io/projected/266031b7-6634-414d-93d5-a4cef3b0f809-kube-api-access-kr9wv\") on node \"crc\" DevicePath \"\""
Jan 23 08:53:29 crc kubenswrapper[4711]: I0123 08:53:29.750651 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/266031b7-6634-414d-93d5-a4cef3b0f809-utilities\") on node \"crc\" DevicePath \"\""
Jan 23 08:53:29 crc kubenswrapper[4711]: I0123 08:53:29.750663 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/266031b7-6634-414d-93d5-a4cef3b0f809-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 23 08:53:29 crc kubenswrapper[4711]: I0123 08:53:29.995478 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-znfsx"]
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.001864 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-znfsx"]
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.444995 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.568804 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33d0c830-0c71-46e9-8511-81c394e9d0bc-config-data\") pod \"33d0c830-0c71-46e9-8511-81c394e9d0bc\" (UID: \"33d0c830-0c71-46e9-8511-81c394e9d0bc\") "
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.568895 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tdf97\" (UniqueName: \"kubernetes.io/projected/33d0c830-0c71-46e9-8511-81c394e9d0bc-kube-api-access-tdf97\") pod \"33d0c830-0c71-46e9-8511-81c394e9d0bc\" (UID: \"33d0c830-0c71-46e9-8511-81c394e9d0bc\") "
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.572610 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33d0c830-0c71-46e9-8511-81c394e9d0bc-kube-api-access-tdf97" (OuterVolumeSpecName: "kube-api-access-tdf97") pod "33d0c830-0c71-46e9-8511-81c394e9d0bc" (UID: "33d0c830-0c71-46e9-8511-81c394e9d0bc"). InnerVolumeSpecName "kube-api-access-tdf97". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.591947 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33d0c830-0c71-46e9-8511-81c394e9d0bc-config-data" (OuterVolumeSpecName: "config-data") pod "33d0c830-0c71-46e9-8511-81c394e9d0bc" (UID: "33d0c830-0c71-46e9-8511-81c394e9d0bc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.662909 4711 generic.go:334] "Generic (PLEG): container finished" podID="33d0c830-0c71-46e9-8511-81c394e9d0bc" containerID="339c5fea80e4e934890127ae67e3d121b07c7541ba723a2c5b61ff0977ab8331" exitCode=0
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.662980 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"33d0c830-0c71-46e9-8511-81c394e9d0bc","Type":"ContainerDied","Data":"339c5fea80e4e934890127ae67e3d121b07c7541ba723a2c5b61ff0977ab8331"}
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.663005 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"33d0c830-0c71-46e9-8511-81c394e9d0bc","Type":"ContainerDied","Data":"e89f724ace6369a38e588845ceb40d63d9da41ae9e7d519c86db20ce467195b2"}
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.663023 4711 scope.go:117] "RemoveContainer" containerID="339c5fea80e4e934890127ae67e3d121b07c7541ba723a2c5b61ff0977ab8331"
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.663395 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.670877 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33d0c830-0c71-46e9-8511-81c394e9d0bc-config-data\") on node \"crc\" DevicePath \"\""
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.670904 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tdf97\" (UniqueName: \"kubernetes.io/projected/33d0c830-0c71-46e9-8511-81c394e9d0bc-kube-api-access-tdf97\") on node \"crc\" DevicePath \"\""
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.689586 4711 scope.go:117] "RemoveContainer" containerID="339c5fea80e4e934890127ae67e3d121b07c7541ba723a2c5b61ff0977ab8331"
Jan 23 08:53:30 crc kubenswrapper[4711]: E0123 08:53:30.689972 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"339c5fea80e4e934890127ae67e3d121b07c7541ba723a2c5b61ff0977ab8331\": container with ID starting with 339c5fea80e4e934890127ae67e3d121b07c7541ba723a2c5b61ff0977ab8331 not found: ID does not exist" containerID="339c5fea80e4e934890127ae67e3d121b07c7541ba723a2c5b61ff0977ab8331"
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.690022 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"339c5fea80e4e934890127ae67e3d121b07c7541ba723a2c5b61ff0977ab8331"} err="failed to get container status \"339c5fea80e4e934890127ae67e3d121b07c7541ba723a2c5b61ff0977ab8331\": rpc error: code = NotFound desc = could not find container \"339c5fea80e4e934890127ae67e3d121b07c7541ba723a2c5b61ff0977ab8331\": container with ID starting with 339c5fea80e4e934890127ae67e3d121b07c7541ba723a2c5b61ff0977ab8331 not found: ID does not exist"
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.695712 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"]
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.713877 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"]
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.719920 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"]
Jan 23 08:53:30 crc kubenswrapper[4711]: E0123 08:53:30.720379 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="266031b7-6634-414d-93d5-a4cef3b0f809" containerName="extract-content"
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.720409 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="266031b7-6634-414d-93d5-a4cef3b0f809" containerName="extract-content"
Jan 23 08:53:30 crc kubenswrapper[4711]: E0123 08:53:30.720433 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33d0c830-0c71-46e9-8511-81c394e9d0bc" containerName="nova-kuttl-scheduler-scheduler"
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.720442 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="33d0c830-0c71-46e9-8511-81c394e9d0bc" containerName="nova-kuttl-scheduler-scheduler"
Jan 23 08:53:30 crc kubenswrapper[4711]: E0123 08:53:30.720465 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="266031b7-6634-414d-93d5-a4cef3b0f809" containerName="extract-utilities"
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.720479 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="266031b7-6634-414d-93d5-a4cef3b0f809" containerName="extract-utilities"
Jan 23 08:53:30 crc kubenswrapper[4711]: E0123 08:53:30.720547 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="266031b7-6634-414d-93d5-a4cef3b0f809" containerName="registry-server"
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.720560 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="266031b7-6634-414d-93d5-a4cef3b0f809" containerName="registry-server"
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.720745 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="266031b7-6634-414d-93d5-a4cef3b0f809" containerName="registry-server"
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.720787 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="33d0c830-0c71-46e9-8511-81c394e9d0bc" containerName="nova-kuttl-scheduler-scheduler"
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.721458 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.723799 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-scheduler-config-data"
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.726818 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"]
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.873531 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/725a0187-c0a3-4653-a9e4-61f497b5f672-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"725a0187-c0a3-4653-a9e4-61f497b5f672\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.873733 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pzgr\" (UniqueName: \"kubernetes.io/projected/725a0187-c0a3-4653-a9e4-61f497b5f672-kube-api-access-5pzgr\") pod \"nova-kuttl-scheduler-0\" (UID: \"725a0187-c0a3-4653-a9e4-61f497b5f672\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.975174 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pzgr\" (UniqueName: \"kubernetes.io/projected/725a0187-c0a3-4653-a9e4-61f497b5f672-kube-api-access-5pzgr\") pod \"nova-kuttl-scheduler-0\" (UID: \"725a0187-c0a3-4653-a9e4-61f497b5f672\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.975278 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/725a0187-c0a3-4653-a9e4-61f497b5f672-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"725a0187-c0a3-4653-a9e4-61f497b5f672\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.980709 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/725a0187-c0a3-4653-a9e4-61f497b5f672-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"725a0187-c0a3-4653-a9e4-61f497b5f672\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:53:30 crc kubenswrapper[4711]: I0123 08:53:30.991101 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pzgr\" (UniqueName: \"kubernetes.io/projected/725a0187-c0a3-4653-a9e4-61f497b5f672-kube-api-access-5pzgr\") pod \"nova-kuttl-scheduler-0\" (UID: \"725a0187-c0a3-4653-a9e4-61f497b5f672\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:53:31 crc kubenswrapper[4711]: I0123 08:53:31.010560 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:53:31 crc kubenswrapper[4711]: I0123 08:53:31.010623 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:53:31 crc kubenswrapper[4711]: I0123 08:53:31.058460 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:53:31 crc kubenswrapper[4711]: W0123 08:53:31.478154 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod725a0187_c0a3_4653_a9e4_61f497b5f672.slice/crio-1194d67f808c42303741f5ef0243958460777e3274b52df02b990d14c79f64c9 WatchSource:0}: Error finding container 1194d67f808c42303741f5ef0243958460777e3274b52df02b990d14c79f64c9: Status 404 returned error can't find the container with id 1194d67f808c42303741f5ef0243958460777e3274b52df02b990d14c79f64c9
Jan 23 08:53:31 crc kubenswrapper[4711]: I0123 08:53:31.485305 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="266031b7-6634-414d-93d5-a4cef3b0f809" path="/var/lib/kubelet/pods/266031b7-6634-414d-93d5-a4cef3b0f809/volumes"
Jan 23 08:53:31 crc kubenswrapper[4711]: I0123 08:53:31.486738 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33d0c830-0c71-46e9-8511-81c394e9d0bc" path="/var/lib/kubelet/pods/33d0c830-0c71-46e9-8511-81c394e9d0bc/volumes"
Jan 23 08:53:31 crc kubenswrapper[4711]: I0123 08:53:31.487422 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"]
Jan 23 08:53:31 crc kubenswrapper[4711]: I0123 08:53:31.674183 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"725a0187-c0a3-4653-a9e4-61f497b5f672","Type":"ContainerStarted","Data":"1194d67f808c42303741f5ef0243958460777e3274b52df02b990d14c79f64c9"}
Jan 23 08:53:32 crc kubenswrapper[4711]: I0123 08:53:32.474392 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058"
Jan 23 08:53:32 crc kubenswrapper[4711]: I0123 08:53:32.685944 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"725a0187-c0a3-4653-a9e4-61f497b5f672","Type":"ContainerStarted","Data":"e056b28e57eb53c4edaeda5d56bf68ec89df5c0eb9bcf9c6add4a9001d39a127"}
Jan 23 08:53:32 crc kubenswrapper[4711]: I0123 08:53:32.721799 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podStartSLOduration=2.72177561 podStartE2EDuration="2.72177561s" podCreationTimestamp="2026-01-23 08:53:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:53:32.712207296 +0000 UTC m=+1998.285163664" watchObservedRunningTime="2026-01-23 08:53:32.72177561 +0000 UTC m=+1998.294731968"
Jan 23 08:53:33 crc kubenswrapper[4711]: I0123 08:53:33.700812 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerStarted","Data":"6fc3e292f98695a914f968c4697ab5b4100d4aad931a282b95317fbc924708bc"}
Jan 23 08:53:33 crc kubenswrapper[4711]: I0123 08:53:33.995752 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-cell1-conductor-0"
Jan 23 08:53:34 crc kubenswrapper[4711]: I0123 08:53:34.448191 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc"]
Jan 23 08:53:34 crc kubenswrapper[4711]: I0123 08:53:34.449653 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc"
Jan 23 08:53:34 crc kubenswrapper[4711]: I0123 08:53:34.451411 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-manage-config-data"
Jan 23 08:53:34 crc kubenswrapper[4711]: I0123 08:53:34.453427 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell1-manage-scripts"
Jan 23 08:53:34 crc kubenswrapper[4711]: I0123 08:53:34.459255 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc"]
Jan 23 08:53:34 crc kubenswrapper[4711]: I0123 08:53:34.558870 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5518fb5f-7762-4beb-b66e-c5d463c3a672-scripts\") pod \"nova-kuttl-cell1-cell-mapping-2mtcc\" (UID: \"5518fb5f-7762-4beb-b66e-c5d463c3a672\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc"
Jan 23 08:53:34 crc kubenswrapper[4711]: I0123 08:53:34.558979 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5518fb5f-7762-4beb-b66e-c5d463c3a672-config-data\") pod \"nova-kuttl-cell1-cell-mapping-2mtcc\" (UID: \"5518fb5f-7762-4beb-b66e-c5d463c3a672\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc"
Jan 23 08:53:34 crc kubenswrapper[4711]: I0123 08:53:34.559018 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxvv7\" (UniqueName: \"kubernetes.io/projected/5518fb5f-7762-4beb-b66e-c5d463c3a672-kube-api-access-xxvv7\") pod \"nova-kuttl-cell1-cell-mapping-2mtcc\" (UID: \"5518fb5f-7762-4beb-b66e-c5d463c3a672\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc"
Jan 23 08:53:34 crc kubenswrapper[4711]: I0123 08:53:34.660656 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5518fb5f-7762-4beb-b66e-c5d463c3a672-config-data\") pod \"nova-kuttl-cell1-cell-mapping-2mtcc\" (UID: \"5518fb5f-7762-4beb-b66e-c5d463c3a672\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc"
Jan 23 08:53:34 crc kubenswrapper[4711]: I0123 08:53:34.660731 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxvv7\" (UniqueName: \"kubernetes.io/projected/5518fb5f-7762-4beb-b66e-c5d463c3a672-kube-api-access-xxvv7\") pod \"nova-kuttl-cell1-cell-mapping-2mtcc\" (UID: \"5518fb5f-7762-4beb-b66e-c5d463c3a672\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc"
Jan 23 08:53:34 crc kubenswrapper[4711]: I0123 08:53:34.660837 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5518fb5f-7762-4beb-b66e-c5d463c3a672-scripts\") pod \"nova-kuttl-cell1-cell-mapping-2mtcc\" (UID: \"5518fb5f-7762-4beb-b66e-c5d463c3a672\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc"
Jan 23 08:53:34 crc kubenswrapper[4711]: I0123 08:53:34.672263 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5518fb5f-7762-4beb-b66e-c5d463c3a672-scripts\") pod \"nova-kuttl-cell1-cell-mapping-2mtcc\" (UID: \"5518fb5f-7762-4beb-b66e-c5d463c3a672\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc"
Jan 23 08:53:34 crc kubenswrapper[4711]: I0123 08:53:34.672315 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5518fb5f-7762-4beb-b66e-c5d463c3a672-config-data\") pod \"nova-kuttl-cell1-cell-mapping-2mtcc\" (UID: \"5518fb5f-7762-4beb-b66e-c5d463c3a672\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc"
Jan 23 08:53:34 crc kubenswrapper[4711]: I0123 08:53:34.682205 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxvv7\" (UniqueName: \"kubernetes.io/projected/5518fb5f-7762-4beb-b66e-c5d463c3a672-kube-api-access-xxvv7\") pod \"nova-kuttl-cell1-cell-mapping-2mtcc\" (UID: \"5518fb5f-7762-4beb-b66e-c5d463c3a672\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc"
Jan 23 08:53:34 crc kubenswrapper[4711]: I0123 08:53:34.767726 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc"
Jan 23 08:53:35 crc kubenswrapper[4711]: I0123 08:53:35.224762 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc"]
Jan 23 08:53:35 crc kubenswrapper[4711]: I0123 08:53:35.721333 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc" event={"ID":"5518fb5f-7762-4beb-b66e-c5d463c3a672","Type":"ContainerStarted","Data":"45600017a11c1fc9751cd6e1c516c258c80f0ea63de60a23b4b6fff689a42150"}
Jan 23 08:53:35 crc kubenswrapper[4711]: I0123 08:53:35.721384 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc" event={"ID":"5518fb5f-7762-4beb-b66e-c5d463c3a672","Type":"ContainerStarted","Data":"33cebd672650839d9f9fb70b386581e9888ca8ec50e56b37dd4df8287299c54f"}
Jan 23 08:53:35 crc kubenswrapper[4711]: I0123 08:53:35.745832 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc" podStartSLOduration=1.745811724 podStartE2EDuration="1.745811724s" podCreationTimestamp="2026-01-23 08:53:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:53:35.738495525 +0000 UTC m=+2001.311451913" watchObservedRunningTime="2026-01-23 08:53:35.745811724 +0000 UTC m=+2001.318768322"
Jan 23 08:53:36 crc kubenswrapper[4711]: I0123 08:53:36.010751 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:53:36 crc kubenswrapper[4711]: I0123 08:53:36.011050 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-metadata-0"
Jan 23 08:53:36 crc kubenswrapper[4711]: I0123 08:53:36.037465 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:53:36 crc kubenswrapper[4711]: I0123 08:53:36.037546 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-0"
Jan 23 08:53:36 crc kubenswrapper[4711]: I0123 08:53:36.059027 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:53:37 crc kubenswrapper[4711]: I0123 08:53:37.135784 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="82af09e6-75b7-4ea4-8718-14ecbb9e2a36" containerName="nova-kuttl-api-api" probeResult="failure" output="Get \"http://10.217.0.231:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 23 08:53:37 crc kubenswrapper[4711]: I0123 08:53:37.176820 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="6a2adbe0-194b-4043-9f43-2257a5ff97b2" containerName="nova-kuttl-metadata-log" probeResult="failure" output="Get \"http://10.217.0.230:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 23 08:53:37 crc kubenswrapper[4711]: I0123 08:53:37.176884 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="82af09e6-75b7-4ea4-8718-14ecbb9e2a36" containerName="nova-kuttl-api-log" probeResult="failure" output="Get \"http://10.217.0.231:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 23 08:53:37 crc kubenswrapper[4711]: I0123 08:53:37.177034 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="6a2adbe0-194b-4043-9f43-2257a5ff97b2" containerName="nova-kuttl-metadata-metadata" probeResult="failure" output="Get \"http://10.217.0.230:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 23 08:53:40 crc kubenswrapper[4711]: I0123 08:53:40.763612 4711 generic.go:334] "Generic (PLEG): container finished" podID="5518fb5f-7762-4beb-b66e-c5d463c3a672" containerID="45600017a11c1fc9751cd6e1c516c258c80f0ea63de60a23b4b6fff689a42150" exitCode=0
Jan 23 08:53:40 crc kubenswrapper[4711]: I0123 08:53:40.763683 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc" event={"ID":"5518fb5f-7762-4beb-b66e-c5d463c3a672","Type":"ContainerDied","Data":"45600017a11c1fc9751cd6e1c516c258c80f0ea63de60a23b4b6fff689a42150"}
Jan 23 08:53:41 crc kubenswrapper[4711]: I0123 08:53:41.059136 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:53:41 crc kubenswrapper[4711]: I0123 08:53:41.096358 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:53:41 crc kubenswrapper[4711]: I0123 08:53:41.806952 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-scheduler-0"
Jan 23 08:53:42 crc kubenswrapper[4711]: I0123 08:53:42.102938 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc"
Jan 23 08:53:42 crc kubenswrapper[4711]: I0123 08:53:42.180185 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxvv7\" (UniqueName: \"kubernetes.io/projected/5518fb5f-7762-4beb-b66e-c5d463c3a672-kube-api-access-xxvv7\") pod \"5518fb5f-7762-4beb-b66e-c5d463c3a672\" (UID: \"5518fb5f-7762-4beb-b66e-c5d463c3a672\") "
Jan 23 08:53:42 crc kubenswrapper[4711]: I0123 08:53:42.180744 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5518fb5f-7762-4beb-b66e-c5d463c3a672-config-data\") pod \"5518fb5f-7762-4beb-b66e-c5d463c3a672\" (UID: \"5518fb5f-7762-4beb-b66e-c5d463c3a672\") "
Jan 23 08:53:42 crc kubenswrapper[4711]: I0123 08:53:42.180865 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5518fb5f-7762-4beb-b66e-c5d463c3a672-scripts\") pod \"5518fb5f-7762-4beb-b66e-c5d463c3a672\" (UID: \"5518fb5f-7762-4beb-b66e-c5d463c3a672\") "
Jan 23 08:53:42 crc kubenswrapper[4711]: I0123 08:53:42.191581 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5518fb5f-7762-4beb-b66e-c5d463c3a672-scripts" (OuterVolumeSpecName: "scripts") pod "5518fb5f-7762-4beb-b66e-c5d463c3a672" (UID: "5518fb5f-7762-4beb-b66e-c5d463c3a672"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:53:42 crc kubenswrapper[4711]: I0123 08:53:42.193735 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5518fb5f-7762-4beb-b66e-c5d463c3a672-kube-api-access-xxvv7" (OuterVolumeSpecName: "kube-api-access-xxvv7") pod "5518fb5f-7762-4beb-b66e-c5d463c3a672" (UID: "5518fb5f-7762-4beb-b66e-c5d463c3a672"). InnerVolumeSpecName "kube-api-access-xxvv7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 08:53:42 crc kubenswrapper[4711]: I0123 08:53:42.216622 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5518fb5f-7762-4beb-b66e-c5d463c3a672-config-data" (OuterVolumeSpecName: "config-data") pod "5518fb5f-7762-4beb-b66e-c5d463c3a672" (UID: "5518fb5f-7762-4beb-b66e-c5d463c3a672"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 23 08:53:42 crc kubenswrapper[4711]: I0123 08:53:42.282807 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxvv7\" (UniqueName: \"kubernetes.io/projected/5518fb5f-7762-4beb-b66e-c5d463c3a672-kube-api-access-xxvv7\") on node \"crc\" DevicePath \"\""
Jan 23 08:53:42 crc kubenswrapper[4711]: I0123 08:53:42.282850 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5518fb5f-7762-4beb-b66e-c5d463c3a672-config-data\") on node \"crc\" DevicePath \"\""
Jan 23 08:53:42 crc kubenswrapper[4711]: I0123 08:53:42.282860 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5518fb5f-7762-4beb-b66e-c5d463c3a672-scripts\") on node \"crc\" DevicePath \"\""
Jan 23 08:53:42 crc kubenswrapper[4711]: I0123 08:53:42.789534 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc"
Jan 23 08:53:42 crc kubenswrapper[4711]: I0123 08:53:42.789499 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc" event={"ID":"5518fb5f-7762-4beb-b66e-c5d463c3a672","Type":"ContainerDied","Data":"33cebd672650839d9f9fb70b386581e9888ca8ec50e56b37dd4df8287299c54f"}
Jan 23 08:53:42 crc kubenswrapper[4711]: I0123 08:53:42.789607 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="33cebd672650839d9f9fb70b386581e9888ca8ec50e56b37dd4df8287299c54f"
Jan 23 08:53:42 crc kubenswrapper[4711]: I0123 08:53:42.964549 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"]
Jan 23 08:53:42 crc kubenswrapper[4711]: I0123 08:53:42.965727 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="82af09e6-75b7-4ea4-8718-14ecbb9e2a36" containerName="nova-kuttl-api-log" containerID="cri-o://5f70f6258d1f8a2f1242aa164aca73d22e74438945deae948dfd9f7e99e4ae4e" gracePeriod=30
Jan 23 08:53:42 crc kubenswrapper[4711]: I0123 08:53:42.965835 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="82af09e6-75b7-4ea4-8718-14ecbb9e2a36" containerName="nova-kuttl-api-api" containerID="cri-o://22eef9cca044c169349674b9c7044ace7f90f4be0fe2a249c13006d7bf95fec2" gracePeriod=30
Jan 23 08:53:42 crc kubenswrapper[4711]: I0123 08:53:42.996447 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"]
Jan 23 08:53:43 crc kubenswrapper[4711]: I0123 08:53:43.059114 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"]
Jan 23 08:53:43 crc kubenswrapper[4711]: I0123 08:53:43.059397 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="6a2adbe0-194b-4043-9f43-2257a5ff97b2" containerName="nova-kuttl-metadata-log" containerID="cri-o://3c7cba2c175fe4f9e23b25442a85380140f0b3ef26e1006ff54deccd55c6b277" gracePeriod=30
Jan 23 08:53:43 crc kubenswrapper[4711]: I0123 08:53:43.059480 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="6a2adbe0-194b-4043-9f43-2257a5ff97b2" containerName="nova-kuttl-metadata-metadata" containerID="cri-o://8ec87f3a26018f9fe8cb3bbdf2b848541cd26b101a2159625175a70d0822ea59" gracePeriod=30
Jan 23 08:53:43 crc kubenswrapper[4711]: I0123 08:53:43.799115 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podUID="725a0187-c0a3-4653-a9e4-61f497b5f672" containerName="nova-kuttl-scheduler-scheduler" containerID="cri-o://e056b28e57eb53c4edaeda5d56bf68ec89df5c0eb9bcf9c6add4a9001d39a127" gracePeriod=30
Jan 23 08:53:44 crc kubenswrapper[4711]: I0123 08:53:44.809547 4711 generic.go:334] "Generic (PLEG): container finished" podID="6a2adbe0-194b-4043-9f43-2257a5ff97b2" containerID="3c7cba2c175fe4f9e23b25442a85380140f0b3ef26e1006ff54deccd55c6b277" exitCode=143
Jan 23 08:53:44 crc kubenswrapper[4711]: I0123 08:53:44.809629 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"6a2adbe0-194b-4043-9f43-2257a5ff97b2","Type":"ContainerDied","Data":"3c7cba2c175fe4f9e23b25442a85380140f0b3ef26e1006ff54deccd55c6b277"}
event={"ID":"6a2adbe0-194b-4043-9f43-2257a5ff97b2","Type":"ContainerDied","Data":"3c7cba2c175fe4f9e23b25442a85380140f0b3ef26e1006ff54deccd55c6b277"} Jan 23 08:53:44 crc kubenswrapper[4711]: I0123 08:53:44.812498 4711 generic.go:334] "Generic (PLEG): container finished" podID="82af09e6-75b7-4ea4-8718-14ecbb9e2a36" containerID="5f70f6258d1f8a2f1242aa164aca73d22e74438945deae948dfd9f7e99e4ae4e" exitCode=143 Jan 23 08:53:44 crc kubenswrapper[4711]: I0123 08:53:44.812560 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"82af09e6-75b7-4ea4-8718-14ecbb9e2a36","Type":"ContainerDied","Data":"5f70f6258d1f8a2f1242aa164aca73d22e74438945deae948dfd9f7e99e4ae4e"} Jan 23 08:53:46 crc kubenswrapper[4711]: E0123 08:53:46.060695 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e056b28e57eb53c4edaeda5d56bf68ec89df5c0eb9bcf9c6add4a9001d39a127" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:53:46 crc kubenswrapper[4711]: E0123 08:53:46.062134 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e056b28e57eb53c4edaeda5d56bf68ec89df5c0eb9bcf9c6add4a9001d39a127" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:53:46 crc kubenswrapper[4711]: E0123 08:53:46.063406 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e056b28e57eb53c4edaeda5d56bf68ec89df5c0eb9bcf9c6add4a9001d39a127" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 23 08:53:46 crc kubenswrapper[4711]: E0123 08:53:46.063445 4711 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podUID="725a0187-c0a3-4653-a9e4-61f497b5f672" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.575675 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.652276 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/82af09e6-75b7-4ea4-8718-14ecbb9e2a36-logs\") pod \"82af09e6-75b7-4ea4-8718-14ecbb9e2a36\" (UID: \"82af09e6-75b7-4ea4-8718-14ecbb9e2a36\") " Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.652373 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82af09e6-75b7-4ea4-8718-14ecbb9e2a36-config-data\") pod \"82af09e6-75b7-4ea4-8718-14ecbb9e2a36\" (UID: \"82af09e6-75b7-4ea4-8718-14ecbb9e2a36\") " Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.652423 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pd94f\" (UniqueName: \"kubernetes.io/projected/82af09e6-75b7-4ea4-8718-14ecbb9e2a36-kube-api-access-pd94f\") pod \"82af09e6-75b7-4ea4-8718-14ecbb9e2a36\" (UID: \"82af09e6-75b7-4ea4-8718-14ecbb9e2a36\") " Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.653704 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82af09e6-75b7-4ea4-8718-14ecbb9e2a36-logs" (OuterVolumeSpecName: "logs") pod "82af09e6-75b7-4ea4-8718-14ecbb9e2a36" (UID: "82af09e6-75b7-4ea4-8718-14ecbb9e2a36"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.658713 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82af09e6-75b7-4ea4-8718-14ecbb9e2a36-kube-api-access-pd94f" (OuterVolumeSpecName: "kube-api-access-pd94f") pod "82af09e6-75b7-4ea4-8718-14ecbb9e2a36" (UID: "82af09e6-75b7-4ea4-8718-14ecbb9e2a36"). InnerVolumeSpecName "kube-api-access-pd94f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.666399 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.680197 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82af09e6-75b7-4ea4-8718-14ecbb9e2a36-config-data" (OuterVolumeSpecName: "config-data") pod "82af09e6-75b7-4ea4-8718-14ecbb9e2a36" (UID: "82af09e6-75b7-4ea4-8718-14ecbb9e2a36"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.754318 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a2adbe0-194b-4043-9f43-2257a5ff97b2-config-data\") pod \"6a2adbe0-194b-4043-9f43-2257a5ff97b2\" (UID: \"6a2adbe0-194b-4043-9f43-2257a5ff97b2\") " Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.754401 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rfbrs\" (UniqueName: \"kubernetes.io/projected/6a2adbe0-194b-4043-9f43-2257a5ff97b2-kube-api-access-rfbrs\") pod \"6a2adbe0-194b-4043-9f43-2257a5ff97b2\" (UID: \"6a2adbe0-194b-4043-9f43-2257a5ff97b2\") " Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.754479 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a2adbe0-194b-4043-9f43-2257a5ff97b2-logs\") pod \"6a2adbe0-194b-4043-9f43-2257a5ff97b2\" (UID: \"6a2adbe0-194b-4043-9f43-2257a5ff97b2\") " Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.754883 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82af09e6-75b7-4ea4-8718-14ecbb9e2a36-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.754905 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pd94f\" (UniqueName: \"kubernetes.io/projected/82af09e6-75b7-4ea4-8718-14ecbb9e2a36-kube-api-access-pd94f\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.754915 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/82af09e6-75b7-4ea4-8718-14ecbb9e2a36-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.755282 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a2adbe0-194b-4043-9f43-2257a5ff97b2-logs" (OuterVolumeSpecName: "logs") pod "6a2adbe0-194b-4043-9f43-2257a5ff97b2" (UID: "6a2adbe0-194b-4043-9f43-2257a5ff97b2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.757181 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a2adbe0-194b-4043-9f43-2257a5ff97b2-kube-api-access-rfbrs" (OuterVolumeSpecName: "kube-api-access-rfbrs") pod "6a2adbe0-194b-4043-9f43-2257a5ff97b2" (UID: "6a2adbe0-194b-4043-9f43-2257a5ff97b2"). InnerVolumeSpecName "kube-api-access-rfbrs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.772776 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a2adbe0-194b-4043-9f43-2257a5ff97b2-config-data" (OuterVolumeSpecName: "config-data") pod "6a2adbe0-194b-4043-9f43-2257a5ff97b2" (UID: "6a2adbe0-194b-4043-9f43-2257a5ff97b2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.831393 4711 generic.go:334] "Generic (PLEG): container finished" podID="6a2adbe0-194b-4043-9f43-2257a5ff97b2" containerID="8ec87f3a26018f9fe8cb3bbdf2b848541cd26b101a2159625175a70d0822ea59" exitCode=0 Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.831446 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"6a2adbe0-194b-4043-9f43-2257a5ff97b2","Type":"ContainerDied","Data":"8ec87f3a26018f9fe8cb3bbdf2b848541cd26b101a2159625175a70d0822ea59"} Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.831488 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.831862 4711 scope.go:117] "RemoveContainer" containerID="8ec87f3a26018f9fe8cb3bbdf2b848541cd26b101a2159625175a70d0822ea59" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.831845 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"6a2adbe0-194b-4043-9f43-2257a5ff97b2","Type":"ContainerDied","Data":"a562458583ba32f23b32ca547bdc30dd3977202e573417d695edefeae4dbf2a2"} Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.834253 4711 generic.go:334] "Generic (PLEG): container finished" podID="82af09e6-75b7-4ea4-8718-14ecbb9e2a36" containerID="22eef9cca044c169349674b9c7044ace7f90f4be0fe2a249c13006d7bf95fec2" exitCode=0 Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.834547 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"82af09e6-75b7-4ea4-8718-14ecbb9e2a36","Type":"ContainerDied","Data":"22eef9cca044c169349674b9c7044ace7f90f4be0fe2a249c13006d7bf95fec2"} Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.834649 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"82af09e6-75b7-4ea4-8718-14ecbb9e2a36","Type":"ContainerDied","Data":"6cb86f7e935fc4a319b827651d8401a7e89f5a6176aca1a51b8d3d365e0f3351"} Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.834694 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.856170 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a2adbe0-194b-4043-9f43-2257a5ff97b2-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.856199 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfbrs\" (UniqueName: \"kubernetes.io/projected/6a2adbe0-194b-4043-9f43-2257a5ff97b2-kube-api-access-rfbrs\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.856208 4711 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a2adbe0-194b-4043-9f43-2257a5ff97b2-logs\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.878880 4711 scope.go:117] "RemoveContainer" containerID="3c7cba2c175fe4f9e23b25442a85380140f0b3ef26e1006ff54deccd55c6b277" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.896605 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.906255 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.916412 4711 scope.go:117] "RemoveContainer" containerID="8ec87f3a26018f9fe8cb3bbdf2b848541cd26b101a2159625175a70d0822ea59" Jan 23 08:53:46 crc kubenswrapper[4711]: E0123 08:53:46.917136 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ec87f3a26018f9fe8cb3bbdf2b848541cd26b101a2159625175a70d0822ea59\": container with ID starting with 8ec87f3a26018f9fe8cb3bbdf2b848541cd26b101a2159625175a70d0822ea59 not found: ID does not exist" containerID="8ec87f3a26018f9fe8cb3bbdf2b848541cd26b101a2159625175a70d0822ea59" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.917174 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ec87f3a26018f9fe8cb3bbdf2b848541cd26b101a2159625175a70d0822ea59"} err="failed to get container status \"8ec87f3a26018f9fe8cb3bbdf2b848541cd26b101a2159625175a70d0822ea59\": rpc error: code = NotFound desc = could not find container \"8ec87f3a26018f9fe8cb3bbdf2b848541cd26b101a2159625175a70d0822ea59\": container with ID starting with 8ec87f3a26018f9fe8cb3bbdf2b848541cd26b101a2159625175a70d0822ea59 not found: ID does not exist" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.917198 4711 scope.go:117] "RemoveContainer" containerID="3c7cba2c175fe4f9e23b25442a85380140f0b3ef26e1006ff54deccd55c6b277" Jan 23 08:53:46 crc kubenswrapper[4711]: E0123 08:53:46.921956 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c7cba2c175fe4f9e23b25442a85380140f0b3ef26e1006ff54deccd55c6b277\": container with ID starting with 3c7cba2c175fe4f9e23b25442a85380140f0b3ef26e1006ff54deccd55c6b277 not found: ID does not exist" containerID="3c7cba2c175fe4f9e23b25442a85380140f0b3ef26e1006ff54deccd55c6b277" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.922042 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c7cba2c175fe4f9e23b25442a85380140f0b3ef26e1006ff54deccd55c6b277"} err="failed to get container status 
\"3c7cba2c175fe4f9e23b25442a85380140f0b3ef26e1006ff54deccd55c6b277\": rpc error: code = NotFound desc = could not find container \"3c7cba2c175fe4f9e23b25442a85380140f0b3ef26e1006ff54deccd55c6b277\": container with ID starting with 3c7cba2c175fe4f9e23b25442a85380140f0b3ef26e1006ff54deccd55c6b277 not found: ID does not exist" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.922092 4711 scope.go:117] "RemoveContainer" containerID="22eef9cca044c169349674b9c7044ace7f90f4be0fe2a249c13006d7bf95fec2" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.926079 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.940140 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.953623 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:53:46 crc kubenswrapper[4711]: E0123 08:53:46.954088 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5518fb5f-7762-4beb-b66e-c5d463c3a672" containerName="nova-manage" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.954109 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="5518fb5f-7762-4beb-b66e-c5d463c3a672" containerName="nova-manage" Jan 23 08:53:46 crc kubenswrapper[4711]: E0123 08:53:46.954125 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a2adbe0-194b-4043-9f43-2257a5ff97b2" containerName="nova-kuttl-metadata-log" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.954132 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a2adbe0-194b-4043-9f43-2257a5ff97b2" containerName="nova-kuttl-metadata-log" Jan 23 08:53:46 crc kubenswrapper[4711]: E0123 08:53:46.954146 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a2adbe0-194b-4043-9f43-2257a5ff97b2" containerName="nova-kuttl-metadata-metadata" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.954152 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a2adbe0-194b-4043-9f43-2257a5ff97b2" containerName="nova-kuttl-metadata-metadata" Jan 23 08:53:46 crc kubenswrapper[4711]: E0123 08:53:46.954160 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82af09e6-75b7-4ea4-8718-14ecbb9e2a36" containerName="nova-kuttl-api-log" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.954165 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="82af09e6-75b7-4ea4-8718-14ecbb9e2a36" containerName="nova-kuttl-api-log" Jan 23 08:53:46 crc kubenswrapper[4711]: E0123 08:53:46.954186 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82af09e6-75b7-4ea4-8718-14ecbb9e2a36" containerName="nova-kuttl-api-api" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.954194 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="82af09e6-75b7-4ea4-8718-14ecbb9e2a36" containerName="nova-kuttl-api-api" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.954373 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="5518fb5f-7762-4beb-b66e-c5d463c3a672" containerName="nova-manage" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.954391 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="82af09e6-75b7-4ea4-8718-14ecbb9e2a36" containerName="nova-kuttl-api-log" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.954400 4711 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="82af09e6-75b7-4ea4-8718-14ecbb9e2a36" containerName="nova-kuttl-api-api" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.954408 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a2adbe0-194b-4043-9f43-2257a5ff97b2" containerName="nova-kuttl-metadata-log" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.954419 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a2adbe0-194b-4043-9f43-2257a5ff97b2" containerName="nova-kuttl-metadata-metadata" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.955388 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.958833 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.959678 4711 scope.go:117] "RemoveContainer" containerID="5f70f6258d1f8a2f1242aa164aca73d22e74438945deae948dfd9f7e99e4ae4e" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.959956 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-metadata-config-data" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.968879 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.970290 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.974823 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-api-config-data" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.980042 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.987605 4711 scope.go:117] "RemoveContainer" containerID="22eef9cca044c169349674b9c7044ace7f90f4be0fe2a249c13006d7bf95fec2" Jan 23 08:53:46 crc kubenswrapper[4711]: E0123 08:53:46.988046 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22eef9cca044c169349674b9c7044ace7f90f4be0fe2a249c13006d7bf95fec2\": container with ID starting with 22eef9cca044c169349674b9c7044ace7f90f4be0fe2a249c13006d7bf95fec2 not found: ID does not exist" containerID="22eef9cca044c169349674b9c7044ace7f90f4be0fe2a249c13006d7bf95fec2" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.988081 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22eef9cca044c169349674b9c7044ace7f90f4be0fe2a249c13006d7bf95fec2"} err="failed to get container status \"22eef9cca044c169349674b9c7044ace7f90f4be0fe2a249c13006d7bf95fec2\": rpc error: code = NotFound desc = could not find container \"22eef9cca044c169349674b9c7044ace7f90f4be0fe2a249c13006d7bf95fec2\": container with ID starting with 22eef9cca044c169349674b9c7044ace7f90f4be0fe2a249c13006d7bf95fec2 not found: ID does not exist" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.988109 4711 scope.go:117] "RemoveContainer" containerID="5f70f6258d1f8a2f1242aa164aca73d22e74438945deae948dfd9f7e99e4ae4e" Jan 23 08:53:46 crc kubenswrapper[4711]: E0123 08:53:46.988444 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"5f70f6258d1f8a2f1242aa164aca73d22e74438945deae948dfd9f7e99e4ae4e\": container with ID starting with 5f70f6258d1f8a2f1242aa164aca73d22e74438945deae948dfd9f7e99e4ae4e not found: ID does not exist" containerID="5f70f6258d1f8a2f1242aa164aca73d22e74438945deae948dfd9f7e99e4ae4e" Jan 23 08:53:46 crc kubenswrapper[4711]: I0123 08:53:46.988471 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f70f6258d1f8a2f1242aa164aca73d22e74438945deae948dfd9f7e99e4ae4e"} err="failed to get container status \"5f70f6258d1f8a2f1242aa164aca73d22e74438945deae948dfd9f7e99e4ae4e\": rpc error: code = NotFound desc = could not find container \"5f70f6258d1f8a2f1242aa164aca73d22e74438945deae948dfd9f7e99e4ae4e\": container with ID starting with 5f70f6258d1f8a2f1242aa164aca73d22e74438945deae948dfd9f7e99e4ae4e not found: ID does not exist" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.059223 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/667ff795-4e58-403c-9f54-bd5c2ace5456-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"667ff795-4e58-403c-9f54-bd5c2ace5456\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.059294 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f70146fe-2308-422b-9efa-42b334f7675f-config-data\") pod \"nova-kuttl-api-0\" (UID: \"f70146fe-2308-422b-9efa-42b334f7675f\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.059312 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f70146fe-2308-422b-9efa-42b334f7675f-logs\") pod \"nova-kuttl-api-0\" (UID: \"f70146fe-2308-422b-9efa-42b334f7675f\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.059328 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcd56\" (UniqueName: \"kubernetes.io/projected/f70146fe-2308-422b-9efa-42b334f7675f-kube-api-access-rcd56\") pod \"nova-kuttl-api-0\" (UID: \"f70146fe-2308-422b-9efa-42b334f7675f\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.059399 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pd6mz\" (UniqueName: \"kubernetes.io/projected/667ff795-4e58-403c-9f54-bd5c2ace5456-kube-api-access-pd6mz\") pod \"nova-kuttl-metadata-0\" (UID: \"667ff795-4e58-403c-9f54-bd5c2ace5456\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.059434 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/667ff795-4e58-403c-9f54-bd5c2ace5456-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"667ff795-4e58-403c-9f54-bd5c2ace5456\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.160668 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/667ff795-4e58-403c-9f54-bd5c2ace5456-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"667ff795-4e58-403c-9f54-bd5c2ace5456\") " 
pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.160975 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f70146fe-2308-422b-9efa-42b334f7675f-config-data\") pod \"nova-kuttl-api-0\" (UID: \"f70146fe-2308-422b-9efa-42b334f7675f\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.160996 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f70146fe-2308-422b-9efa-42b334f7675f-logs\") pod \"nova-kuttl-api-0\" (UID: \"f70146fe-2308-422b-9efa-42b334f7675f\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.161014 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcd56\" (UniqueName: \"kubernetes.io/projected/f70146fe-2308-422b-9efa-42b334f7675f-kube-api-access-rcd56\") pod \"nova-kuttl-api-0\" (UID: \"f70146fe-2308-422b-9efa-42b334f7675f\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.161042 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pd6mz\" (UniqueName: \"kubernetes.io/projected/667ff795-4e58-403c-9f54-bd5c2ace5456-kube-api-access-pd6mz\") pod \"nova-kuttl-metadata-0\" (UID: \"667ff795-4e58-403c-9f54-bd5c2ace5456\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.161088 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/667ff795-4e58-403c-9f54-bd5c2ace5456-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"667ff795-4e58-403c-9f54-bd5c2ace5456\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.161439 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f70146fe-2308-422b-9efa-42b334f7675f-logs\") pod \"nova-kuttl-api-0\" (UID: \"f70146fe-2308-422b-9efa-42b334f7675f\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.161561 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/667ff795-4e58-403c-9f54-bd5c2ace5456-logs\") pod \"nova-kuttl-metadata-0\" (UID: \"667ff795-4e58-403c-9f54-bd5c2ace5456\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.164927 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f70146fe-2308-422b-9efa-42b334f7675f-config-data\") pod \"nova-kuttl-api-0\" (UID: \"f70146fe-2308-422b-9efa-42b334f7675f\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.164909 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/667ff795-4e58-403c-9f54-bd5c2ace5456-config-data\") pod \"nova-kuttl-metadata-0\" (UID: \"667ff795-4e58-403c-9f54-bd5c2ace5456\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.180054 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcd56\" (UniqueName: 
\"kubernetes.io/projected/f70146fe-2308-422b-9efa-42b334f7675f-kube-api-access-rcd56\") pod \"nova-kuttl-api-0\" (UID: \"f70146fe-2308-422b-9efa-42b334f7675f\") " pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.180742 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pd6mz\" (UniqueName: \"kubernetes.io/projected/667ff795-4e58-403c-9f54-bd5c2ace5456-kube-api-access-pd6mz\") pod \"nova-kuttl-metadata-0\" (UID: \"667ff795-4e58-403c-9f54-bd5c2ace5456\") " pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.274840 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.295474 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.482799 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a2adbe0-194b-4043-9f43-2257a5ff97b2" path="/var/lib/kubelet/pods/6a2adbe0-194b-4043-9f43-2257a5ff97b2/volumes" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.483695 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82af09e6-75b7-4ea4-8718-14ecbb9e2a36" path="/var/lib/kubelet/pods/82af09e6-75b7-4ea4-8718-14ecbb9e2a36/volumes" Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.724794 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-metadata-0"] Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.784187 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-api-0"] Jan 23 08:53:47 crc kubenswrapper[4711]: W0123 08:53:47.785578 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf70146fe_2308_422b_9efa_42b334f7675f.slice/crio-bcebd15a6b25e59fd587e43dec5795345af2ea9efb40dbcd5d70efaac9bc0a48 WatchSource:0}: Error finding container bcebd15a6b25e59fd587e43dec5795345af2ea9efb40dbcd5d70efaac9bc0a48: Status 404 returned error can't find the container with id bcebd15a6b25e59fd587e43dec5795345af2ea9efb40dbcd5d70efaac9bc0a48 Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.845418 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"f70146fe-2308-422b-9efa-42b334f7675f","Type":"ContainerStarted","Data":"bcebd15a6b25e59fd587e43dec5795345af2ea9efb40dbcd5d70efaac9bc0a48"} Jan 23 08:53:47 crc kubenswrapper[4711]: I0123 08:53:47.848076 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"667ff795-4e58-403c-9f54-bd5c2ace5456","Type":"ContainerStarted","Data":"7fd04ef42d230b6ab879beae8ef83c2a70c0160d64d01d36b026d25a42c7a052"} Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.155488 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.178067 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/725a0187-c0a3-4653-a9e4-61f497b5f672-config-data\") pod \"725a0187-c0a3-4653-a9e4-61f497b5f672\" (UID: \"725a0187-c0a3-4653-a9e4-61f497b5f672\") " Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.178118 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pzgr\" (UniqueName: \"kubernetes.io/projected/725a0187-c0a3-4653-a9e4-61f497b5f672-kube-api-access-5pzgr\") pod \"725a0187-c0a3-4653-a9e4-61f497b5f672\" (UID: \"725a0187-c0a3-4653-a9e4-61f497b5f672\") " Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.182176 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/725a0187-c0a3-4653-a9e4-61f497b5f672-kube-api-access-5pzgr" (OuterVolumeSpecName: "kube-api-access-5pzgr") pod "725a0187-c0a3-4653-a9e4-61f497b5f672" (UID: "725a0187-c0a3-4653-a9e4-61f497b5f672"). InnerVolumeSpecName "kube-api-access-5pzgr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.225687 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/725a0187-c0a3-4653-a9e4-61f497b5f672-config-data" (OuterVolumeSpecName: "config-data") pod "725a0187-c0a3-4653-a9e4-61f497b5f672" (UID: "725a0187-c0a3-4653-a9e4-61f497b5f672"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.279641 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/725a0187-c0a3-4653-a9e4-61f497b5f672-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.279684 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pzgr\" (UniqueName: \"kubernetes.io/projected/725a0187-c0a3-4653-a9e4-61f497b5f672-kube-api-access-5pzgr\") on node \"crc\" DevicePath \"\"" Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.859938 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"f70146fe-2308-422b-9efa-42b334f7675f","Type":"ContainerStarted","Data":"a9157cacd6da3ef1689c95586ec15b5c1d2138aa9ff95722dc1916bd12b1c9b2"} Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.860202 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-api-0" event={"ID":"f70146fe-2308-422b-9efa-42b334f7675f","Type":"ContainerStarted","Data":"0a2604b36ac16f70d84cae8e7b0dc7458ab0308bb58030f288b477566d7e0823"} Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.862910 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"667ff795-4e58-403c-9f54-bd5c2ace5456","Type":"ContainerStarted","Data":"40b1656d43a31b7bbd5468e01b862efb8c4412bc15af3b4e021952978719703d"} Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.862963 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-metadata-0" event={"ID":"667ff795-4e58-403c-9f54-bd5c2ace5456","Type":"ContainerStarted","Data":"bbe7d9badf794e1807535647d450a2b86f68957befe13c726e37056c0ab87eb7"} Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.864676 4711 
generic.go:334] "Generic (PLEG): container finished" podID="725a0187-c0a3-4653-a9e4-61f497b5f672" containerID="e056b28e57eb53c4edaeda5d56bf68ec89df5c0eb9bcf9c6add4a9001d39a127" exitCode=0 Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.864724 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"725a0187-c0a3-4653-a9e4-61f497b5f672","Type":"ContainerDied","Data":"e056b28e57eb53c4edaeda5d56bf68ec89df5c0eb9bcf9c6add4a9001d39a127"} Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.864737 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.864751 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"725a0187-c0a3-4653-a9e4-61f497b5f672","Type":"ContainerDied","Data":"1194d67f808c42303741f5ef0243958460777e3274b52df02b990d14c79f64c9"} Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.864779 4711 scope.go:117] "RemoveContainer" containerID="e056b28e57eb53c4edaeda5d56bf68ec89df5c0eb9bcf9c6add4a9001d39a127" Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.881550 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-api-0" podStartSLOduration=2.881527941 podStartE2EDuration="2.881527941s" podCreationTimestamp="2026-01-23 08:53:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:53:48.876303643 +0000 UTC m=+2014.449260011" watchObservedRunningTime="2026-01-23 08:53:48.881527941 +0000 UTC m=+2014.454484329" Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.895388 4711 scope.go:117] "RemoveContainer" containerID="e056b28e57eb53c4edaeda5d56bf68ec89df5c0eb9bcf9c6add4a9001d39a127" Jan 23 08:53:48 crc kubenswrapper[4711]: E0123 08:53:48.899177 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e056b28e57eb53c4edaeda5d56bf68ec89df5c0eb9bcf9c6add4a9001d39a127\": container with ID starting with e056b28e57eb53c4edaeda5d56bf68ec89df5c0eb9bcf9c6add4a9001d39a127 not found: ID does not exist" containerID="e056b28e57eb53c4edaeda5d56bf68ec89df5c0eb9bcf9c6add4a9001d39a127" Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.899224 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e056b28e57eb53c4edaeda5d56bf68ec89df5c0eb9bcf9c6add4a9001d39a127"} err="failed to get container status \"e056b28e57eb53c4edaeda5d56bf68ec89df5c0eb9bcf9c6add4a9001d39a127\": rpc error: code = NotFound desc = could not find container \"e056b28e57eb53c4edaeda5d56bf68ec89df5c0eb9bcf9c6add4a9001d39a127\": container with ID starting with e056b28e57eb53c4edaeda5d56bf68ec89df5c0eb9bcf9c6add4a9001d39a127 not found: ID does not exist" Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.905282 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-metadata-0" podStartSLOduration=2.905254993 podStartE2EDuration="2.905254993s" podCreationTimestamp="2026-01-23 08:53:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:53:48.899400419 +0000 UTC m=+2014.472356817" watchObservedRunningTime="2026-01-23 08:53:48.905254993 +0000 UTC 
m=+2014.478211371" Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.926445 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.939875 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.951233 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:53:48 crc kubenswrapper[4711]: E0123 08:53:48.952024 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="725a0187-c0a3-4653-a9e4-61f497b5f672" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.952141 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="725a0187-c0a3-4653-a9e4-61f497b5f672" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.952363 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="725a0187-c0a3-4653-a9e4-61f497b5f672" containerName="nova-kuttl-scheduler-scheduler" Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.953134 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.955609 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-scheduler-config-data" Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.962082 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.992192 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44ba32b3-a02b-4ab5-a00c-90fb25eea139-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"44ba32b3-a02b-4ab5-a00c-90fb25eea139\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:48 crc kubenswrapper[4711]: I0123 08:53:48.992242 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phvvd\" (UniqueName: \"kubernetes.io/projected/44ba32b3-a02b-4ab5-a00c-90fb25eea139-kube-api-access-phvvd\") pod \"nova-kuttl-scheduler-0\" (UID: \"44ba32b3-a02b-4ab5-a00c-90fb25eea139\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:49 crc kubenswrapper[4711]: I0123 08:53:49.093352 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44ba32b3-a02b-4ab5-a00c-90fb25eea139-config-data\") pod \"nova-kuttl-scheduler-0\" (UID: \"44ba32b3-a02b-4ab5-a00c-90fb25eea139\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:49 crc kubenswrapper[4711]: I0123 08:53:49.093430 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phvvd\" (UniqueName: \"kubernetes.io/projected/44ba32b3-a02b-4ab5-a00c-90fb25eea139-kube-api-access-phvvd\") pod \"nova-kuttl-scheduler-0\" (UID: \"44ba32b3-a02b-4ab5-a00c-90fb25eea139\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:49 crc kubenswrapper[4711]: I0123 08:53:49.100384 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44ba32b3-a02b-4ab5-a00c-90fb25eea139-config-data\") pod 
\"nova-kuttl-scheduler-0\" (UID: \"44ba32b3-a02b-4ab5-a00c-90fb25eea139\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:49 crc kubenswrapper[4711]: I0123 08:53:49.115875 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phvvd\" (UniqueName: \"kubernetes.io/projected/44ba32b3-a02b-4ab5-a00c-90fb25eea139-kube-api-access-phvvd\") pod \"nova-kuttl-scheduler-0\" (UID: \"44ba32b3-a02b-4ab5-a00c-90fb25eea139\") " pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:49 crc kubenswrapper[4711]: I0123 08:53:49.283312 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:49 crc kubenswrapper[4711]: I0123 08:53:49.483795 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="725a0187-c0a3-4653-a9e4-61f497b5f672" path="/var/lib/kubelet/pods/725a0187-c0a3-4653-a9e4-61f497b5f672/volumes" Jan 23 08:53:49 crc kubenswrapper[4711]: I0123 08:53:49.731630 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-scheduler-0"] Jan 23 08:53:49 crc kubenswrapper[4711]: W0123 08:53:49.742624 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod44ba32b3_a02b_4ab5_a00c_90fb25eea139.slice/crio-d157adc4539c116d8c952c96a08cae22a4f894f51045c2e4a59a631e52a4cb76 WatchSource:0}: Error finding container d157adc4539c116d8c952c96a08cae22a4f894f51045c2e4a59a631e52a4cb76: Status 404 returned error can't find the container with id d157adc4539c116d8c952c96a08cae22a4f894f51045c2e4a59a631e52a4cb76 Jan 23 08:53:49 crc kubenswrapper[4711]: I0123 08:53:49.874014 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"44ba32b3-a02b-4ab5-a00c-90fb25eea139","Type":"ContainerStarted","Data":"d157adc4539c116d8c952c96a08cae22a4f894f51045c2e4a59a631e52a4cb76"} Jan 23 08:53:50 crc kubenswrapper[4711]: I0123 08:53:50.885424 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-scheduler-0" event={"ID":"44ba32b3-a02b-4ab5-a00c-90fb25eea139","Type":"ContainerStarted","Data":"abeb6aac4a4936edcaac35c71d9cb905e34697da3850ee40fcb9dec10a1278fd"} Jan 23 08:53:50 crc kubenswrapper[4711]: I0123 08:53:50.913031 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-scheduler-0" podStartSLOduration=2.913009882 podStartE2EDuration="2.913009882s" podCreationTimestamp="2026-01-23 08:53:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:53:50.903726664 +0000 UTC m=+2016.476683042" watchObservedRunningTime="2026-01-23 08:53:50.913009882 +0000 UTC m=+2016.485966250" Jan 23 08:53:52 crc kubenswrapper[4711]: I0123 08:53:52.275280 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:52 crc kubenswrapper[4711]: I0123 08:53:52.275633 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:54 crc kubenswrapper[4711]: I0123 08:53:54.284409 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:57 crc kubenswrapper[4711]: I0123 08:53:57.275953 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:57 crc kubenswrapper[4711]: I0123 08:53:57.276344 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:53:57 crc kubenswrapper[4711]: I0123 08:53:57.297079 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:57 crc kubenswrapper[4711]: I0123 08:53:57.297153 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:53:58 crc kubenswrapper[4711]: I0123 08:53:58.359723 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="667ff795-4e58-403c-9f54-bd5c2ace5456" containerName="nova-kuttl-metadata-log" probeResult="failure" output="Get \"http://10.217.0.234:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:53:58 crc kubenswrapper[4711]: I0123 08:53:58.401729 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="f70146fe-2308-422b-9efa-42b334f7675f" containerName="nova-kuttl-api-log" probeResult="failure" output="Get \"http://10.217.0.235:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:53:58 crc kubenswrapper[4711]: I0123 08:53:58.442704 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-metadata-0" podUID="667ff795-4e58-403c-9f54-bd5c2ace5456" containerName="nova-kuttl-metadata-metadata" probeResult="failure" output="Get \"http://10.217.0.234:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:53:58 crc kubenswrapper[4711]: I0123 08:53:58.442727 4711 prober.go:107] "Probe failed" probeType="Startup" pod="nova-kuttl-default/nova-kuttl-api-0" podUID="f70146fe-2308-422b-9efa-42b334f7675f" containerName="nova-kuttl-api-api" probeResult="failure" output="Get \"http://10.217.0.235:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 23 08:53:58 crc kubenswrapper[4711]: I0123 08:53:58.775502 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-slxcc"] Jan 23 08:53:58 crc kubenswrapper[4711]: I0123 08:53:58.777667 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-slxcc" Jan 23 08:53:58 crc kubenswrapper[4711]: I0123 08:53:58.788346 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-slxcc"] Jan 23 08:53:58 crc kubenswrapper[4711]: I0123 08:53:58.949111 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d639ad2b-aad1-4ef5-a321-f28f5ec089a6-utilities\") pod \"redhat-operators-slxcc\" (UID: \"d639ad2b-aad1-4ef5-a321-f28f5ec089a6\") " pod="openshift-marketplace/redhat-operators-slxcc" Jan 23 08:53:58 crc kubenswrapper[4711]: I0123 08:53:58.949193 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8khq\" (UniqueName: \"kubernetes.io/projected/d639ad2b-aad1-4ef5-a321-f28f5ec089a6-kube-api-access-n8khq\") pod \"redhat-operators-slxcc\" (UID: \"d639ad2b-aad1-4ef5-a321-f28f5ec089a6\") " pod="openshift-marketplace/redhat-operators-slxcc" Jan 23 08:53:58 crc kubenswrapper[4711]: I0123 08:53:58.949229 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d639ad2b-aad1-4ef5-a321-f28f5ec089a6-catalog-content\") pod \"redhat-operators-slxcc\" (UID: \"d639ad2b-aad1-4ef5-a321-f28f5ec089a6\") " pod="openshift-marketplace/redhat-operators-slxcc" Jan 23 08:53:59 crc kubenswrapper[4711]: I0123 08:53:59.050351 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d639ad2b-aad1-4ef5-a321-f28f5ec089a6-utilities\") pod \"redhat-operators-slxcc\" (UID: \"d639ad2b-aad1-4ef5-a321-f28f5ec089a6\") " pod="openshift-marketplace/redhat-operators-slxcc" Jan 23 08:53:59 crc kubenswrapper[4711]: I0123 08:53:59.050427 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8khq\" (UniqueName: \"kubernetes.io/projected/d639ad2b-aad1-4ef5-a321-f28f5ec089a6-kube-api-access-n8khq\") pod \"redhat-operators-slxcc\" (UID: \"d639ad2b-aad1-4ef5-a321-f28f5ec089a6\") " pod="openshift-marketplace/redhat-operators-slxcc" Jan 23 08:53:59 crc kubenswrapper[4711]: I0123 08:53:59.050463 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d639ad2b-aad1-4ef5-a321-f28f5ec089a6-catalog-content\") pod \"redhat-operators-slxcc\" (UID: \"d639ad2b-aad1-4ef5-a321-f28f5ec089a6\") " pod="openshift-marketplace/redhat-operators-slxcc" Jan 23 08:53:59 crc kubenswrapper[4711]: I0123 08:53:59.051049 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d639ad2b-aad1-4ef5-a321-f28f5ec089a6-catalog-content\") pod \"redhat-operators-slxcc\" (UID: \"d639ad2b-aad1-4ef5-a321-f28f5ec089a6\") " pod="openshift-marketplace/redhat-operators-slxcc" Jan 23 08:53:59 crc kubenswrapper[4711]: I0123 08:53:59.051115 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d639ad2b-aad1-4ef5-a321-f28f5ec089a6-utilities\") pod \"redhat-operators-slxcc\" (UID: \"d639ad2b-aad1-4ef5-a321-f28f5ec089a6\") " pod="openshift-marketplace/redhat-operators-slxcc" Jan 23 08:53:59 crc kubenswrapper[4711]: I0123 08:53:59.078362 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-n8khq\" (UniqueName: \"kubernetes.io/projected/d639ad2b-aad1-4ef5-a321-f28f5ec089a6-kube-api-access-n8khq\") pod \"redhat-operators-slxcc\" (UID: \"d639ad2b-aad1-4ef5-a321-f28f5ec089a6\") " pod="openshift-marketplace/redhat-operators-slxcc" Jan 23 08:53:59 crc kubenswrapper[4711]: I0123 08:53:59.107248 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-slxcc" Jan 23 08:53:59 crc kubenswrapper[4711]: I0123 08:53:59.283791 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:59 crc kubenswrapper[4711]: I0123 08:53:59.319728 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:53:59 crc kubenswrapper[4711]: I0123 08:53:59.611760 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-slxcc"] Jan 23 08:54:00 crc kubenswrapper[4711]: I0123 08:54:00.052472 4711 generic.go:334] "Generic (PLEG): container finished" podID="d639ad2b-aad1-4ef5-a321-f28f5ec089a6" containerID="6a11485508e592915ce6d3a424c726afbacaebf6bbb9bf57a45064a1736b65c4" exitCode=0 Jan 23 08:54:00 crc kubenswrapper[4711]: I0123 08:54:00.052549 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-slxcc" event={"ID":"d639ad2b-aad1-4ef5-a321-f28f5ec089a6","Type":"ContainerDied","Data":"6a11485508e592915ce6d3a424c726afbacaebf6bbb9bf57a45064a1736b65c4"} Jan 23 08:54:00 crc kubenswrapper[4711]: I0123 08:54:00.052600 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-slxcc" event={"ID":"d639ad2b-aad1-4ef5-a321-f28f5ec089a6","Type":"ContainerStarted","Data":"0f36557eb382b07b6b27c75c96e4fa0767da1a8b5acca8cf2e37d3b00356886a"} Jan 23 08:54:00 crc kubenswrapper[4711]: I0123 08:54:00.078175 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-scheduler-0" Jan 23 08:54:01 crc kubenswrapper[4711]: I0123 08:54:01.061640 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-slxcc" event={"ID":"d639ad2b-aad1-4ef5-a321-f28f5ec089a6","Type":"ContainerStarted","Data":"476fb36f701f3542b2edba08dc788960730ae7a0b8cfece6bd42ffda2bee998e"} Jan 23 08:54:02 crc kubenswrapper[4711]: I0123 08:54:02.073564 4711 generic.go:334] "Generic (PLEG): container finished" podID="d639ad2b-aad1-4ef5-a321-f28f5ec089a6" containerID="476fb36f701f3542b2edba08dc788960730ae7a0b8cfece6bd42ffda2bee998e" exitCode=0 Jan 23 08:54:02 crc kubenswrapper[4711]: I0123 08:54:02.073645 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-slxcc" event={"ID":"d639ad2b-aad1-4ef5-a321-f28f5ec089a6","Type":"ContainerDied","Data":"476fb36f701f3542b2edba08dc788960730ae7a0b8cfece6bd42ffda2bee998e"} Jan 23 08:54:03 crc kubenswrapper[4711]: I0123 08:54:03.090140 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-slxcc" event={"ID":"d639ad2b-aad1-4ef5-a321-f28f5ec089a6","Type":"ContainerStarted","Data":"317a746b8b977276fd65fc4bb25b36da3033411d6c31d9f07e1fcc879f46bf0c"} Jan 23 08:54:07 crc kubenswrapper[4711]: I0123 08:54:07.277877 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:54:07 crc kubenswrapper[4711]: I0123 08:54:07.278427 4711 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:54:07 crc kubenswrapper[4711]: I0123 08:54:07.280176 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:54:07 crc kubenswrapper[4711]: I0123 08:54:07.280959 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-metadata-0" Jan 23 08:54:07 crc kubenswrapper[4711]: I0123 08:54:07.299594 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-slxcc" podStartSLOduration=6.693720846 podStartE2EDuration="9.299575111s" podCreationTimestamp="2026-01-23 08:53:58 +0000 UTC" firstStartedPulling="2026-01-23 08:54:00.054211245 +0000 UTC m=+2025.627167613" lastFinishedPulling="2026-01-23 08:54:02.66006549 +0000 UTC m=+2028.233021878" observedRunningTime="2026-01-23 08:54:03.119012347 +0000 UTC m=+2028.691968715" watchObservedRunningTime="2026-01-23 08:54:07.299575111 +0000 UTC m=+2032.872531469" Jan 23 08:54:07 crc kubenswrapper[4711]: I0123 08:54:07.301571 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:54:07 crc kubenswrapper[4711]: I0123 08:54:07.301902 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:54:07 crc kubenswrapper[4711]: I0123 08:54:07.302847 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:54:07 crc kubenswrapper[4711]: I0123 08:54:07.307093 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:54:08 crc kubenswrapper[4711]: I0123 08:54:08.130818 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:54:08 crc kubenswrapper[4711]: I0123 08:54:08.134729 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/nova-kuttl-api-0" Jan 23 08:54:09 crc kubenswrapper[4711]: I0123 08:54:09.107464 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-slxcc" Jan 23 08:54:09 crc kubenswrapper[4711]: I0123 08:54:09.108256 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-slxcc" Jan 23 08:54:09 crc kubenswrapper[4711]: I0123 08:54:09.152327 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-slxcc" Jan 23 08:54:09 crc kubenswrapper[4711]: I0123 08:54:09.204112 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-slxcc" Jan 23 08:54:09 crc kubenswrapper[4711]: I0123 08:54:09.389012 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-slxcc"] Jan 23 08:54:11 crc kubenswrapper[4711]: I0123 08:54:11.156938 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-slxcc" podUID="d639ad2b-aad1-4ef5-a321-f28f5ec089a6" containerName="registry-server" containerID="cri-o://317a746b8b977276fd65fc4bb25b36da3033411d6c31d9f07e1fcc879f46bf0c" gracePeriod=2 Jan 23 08:54:13 crc kubenswrapper[4711]: I0123 08:54:13.198032 4711 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz"] Jan 23 08:54:13 crc kubenswrapper[4711]: I0123 08:54:13.199443 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" Jan 23 08:54:13 crc kubenswrapper[4711]: I0123 08:54:13.201792 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-manage-config-data" Jan 23 08:54:13 crc kubenswrapper[4711]: I0123 08:54:13.216077 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz"] Jan 23 08:54:13 crc kubenswrapper[4711]: I0123 08:54:13.216734 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-manage-scripts" Jan 23 08:54:13 crc kubenswrapper[4711]: I0123 08:54:13.394875 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mt5k9\" (UniqueName: \"kubernetes.io/projected/ff4efd5e-ae35-4d90-b767-7bfd505b441e-kube-api-access-mt5k9\") pod \"nova-kuttl-cell1-cell-delete-h5mxz\" (UID: \"ff4efd5e-ae35-4d90-b767-7bfd505b441e\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" Jan 23 08:54:13 crc kubenswrapper[4711]: I0123 08:54:13.394993 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff4efd5e-ae35-4d90-b767-7bfd505b441e-scripts\") pod \"nova-kuttl-cell1-cell-delete-h5mxz\" (UID: \"ff4efd5e-ae35-4d90-b767-7bfd505b441e\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" Jan 23 08:54:13 crc kubenswrapper[4711]: I0123 08:54:13.395270 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff4efd5e-ae35-4d90-b767-7bfd505b441e-config-data\") pod \"nova-kuttl-cell1-cell-delete-h5mxz\" (UID: \"ff4efd5e-ae35-4d90-b767-7bfd505b441e\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" Jan 23 08:54:13 crc kubenswrapper[4711]: I0123 08:54:13.497441 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mt5k9\" (UniqueName: \"kubernetes.io/projected/ff4efd5e-ae35-4d90-b767-7bfd505b441e-kube-api-access-mt5k9\") pod \"nova-kuttl-cell1-cell-delete-h5mxz\" (UID: \"ff4efd5e-ae35-4d90-b767-7bfd505b441e\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" Jan 23 08:54:13 crc kubenswrapper[4711]: I0123 08:54:13.497552 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff4efd5e-ae35-4d90-b767-7bfd505b441e-scripts\") pod \"nova-kuttl-cell1-cell-delete-h5mxz\" (UID: \"ff4efd5e-ae35-4d90-b767-7bfd505b441e\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" Jan 23 08:54:13 crc kubenswrapper[4711]: I0123 08:54:13.497764 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff4efd5e-ae35-4d90-b767-7bfd505b441e-config-data\") pod \"nova-kuttl-cell1-cell-delete-h5mxz\" (UID: \"ff4efd5e-ae35-4d90-b767-7bfd505b441e\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" Jan 23 08:54:13 crc kubenswrapper[4711]: I0123 08:54:13.504493 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/ff4efd5e-ae35-4d90-b767-7bfd505b441e-scripts\") pod \"nova-kuttl-cell1-cell-delete-h5mxz\" (UID: \"ff4efd5e-ae35-4d90-b767-7bfd505b441e\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" Jan 23 08:54:13 crc kubenswrapper[4711]: I0123 08:54:13.511056 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff4efd5e-ae35-4d90-b767-7bfd505b441e-config-data\") pod \"nova-kuttl-cell1-cell-delete-h5mxz\" (UID: \"ff4efd5e-ae35-4d90-b767-7bfd505b441e\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" Jan 23 08:54:13 crc kubenswrapper[4711]: I0123 08:54:13.513499 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mt5k9\" (UniqueName: \"kubernetes.io/projected/ff4efd5e-ae35-4d90-b767-7bfd505b441e-kube-api-access-mt5k9\") pod \"nova-kuttl-cell1-cell-delete-h5mxz\" (UID: \"ff4efd5e-ae35-4d90-b767-7bfd505b441e\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" Jan 23 08:54:13 crc kubenswrapper[4711]: I0123 08:54:13.556812 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" Jan 23 08:54:13 crc kubenswrapper[4711]: W0123 08:54:13.987132 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff4efd5e_ae35_4d90_b767_7bfd505b441e.slice/crio-dec89bcc917b5ab2e849b341a5544d4edc8ed2ca090d8ad838dacf9e416216ed WatchSource:0}: Error finding container dec89bcc917b5ab2e849b341a5544d4edc8ed2ca090d8ad838dacf9e416216ed: Status 404 returned error can't find the container with id dec89bcc917b5ab2e849b341a5544d4edc8ed2ca090d8ad838dacf9e416216ed Jan 23 08:54:13 crc kubenswrapper[4711]: I0123 08:54:13.987483 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz"] Jan 23 08:54:14 crc kubenswrapper[4711]: I0123 08:54:14.181964 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" event={"ID":"ff4efd5e-ae35-4d90-b767-7bfd505b441e","Type":"ContainerStarted","Data":"dec89bcc917b5ab2e849b341a5544d4edc8ed2ca090d8ad838dacf9e416216ed"} Jan 23 08:54:17 crc kubenswrapper[4711]: I0123 08:54:17.592395 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-slxcc" Jan 23 08:54:17 crc kubenswrapper[4711]: I0123 08:54:17.777244 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8khq\" (UniqueName: \"kubernetes.io/projected/d639ad2b-aad1-4ef5-a321-f28f5ec089a6-kube-api-access-n8khq\") pod \"d639ad2b-aad1-4ef5-a321-f28f5ec089a6\" (UID: \"d639ad2b-aad1-4ef5-a321-f28f5ec089a6\") " Jan 23 08:54:17 crc kubenswrapper[4711]: I0123 08:54:17.778824 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d639ad2b-aad1-4ef5-a321-f28f5ec089a6-catalog-content\") pod \"d639ad2b-aad1-4ef5-a321-f28f5ec089a6\" (UID: \"d639ad2b-aad1-4ef5-a321-f28f5ec089a6\") " Jan 23 08:54:17 crc kubenswrapper[4711]: I0123 08:54:17.778908 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d639ad2b-aad1-4ef5-a321-f28f5ec089a6-utilities\") pod \"d639ad2b-aad1-4ef5-a321-f28f5ec089a6\" (UID: \"d639ad2b-aad1-4ef5-a321-f28f5ec089a6\") " Jan 23 08:54:17 crc kubenswrapper[4711]: I0123 08:54:17.779815 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d639ad2b-aad1-4ef5-a321-f28f5ec089a6-utilities" (OuterVolumeSpecName: "utilities") pod "d639ad2b-aad1-4ef5-a321-f28f5ec089a6" (UID: "d639ad2b-aad1-4ef5-a321-f28f5ec089a6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:54:17 crc kubenswrapper[4711]: I0123 08:54:17.784424 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d639ad2b-aad1-4ef5-a321-f28f5ec089a6-kube-api-access-n8khq" (OuterVolumeSpecName: "kube-api-access-n8khq") pod "d639ad2b-aad1-4ef5-a321-f28f5ec089a6" (UID: "d639ad2b-aad1-4ef5-a321-f28f5ec089a6"). InnerVolumeSpecName "kube-api-access-n8khq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:54:17 crc kubenswrapper[4711]: I0123 08:54:17.880362 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8khq\" (UniqueName: \"kubernetes.io/projected/d639ad2b-aad1-4ef5-a321-f28f5ec089a6-kube-api-access-n8khq\") on node \"crc\" DevicePath \"\"" Jan 23 08:54:17 crc kubenswrapper[4711]: I0123 08:54:17.880398 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d639ad2b-aad1-4ef5-a321-f28f5ec089a6-utilities\") on node \"crc\" DevicePath \"\"" Jan 23 08:54:17 crc kubenswrapper[4711]: I0123 08:54:17.889464 4711 generic.go:334] "Generic (PLEG): container finished" podID="d639ad2b-aad1-4ef5-a321-f28f5ec089a6" containerID="317a746b8b977276fd65fc4bb25b36da3033411d6c31d9f07e1fcc879f46bf0c" exitCode=0 Jan 23 08:54:17 crc kubenswrapper[4711]: I0123 08:54:17.889547 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-slxcc" event={"ID":"d639ad2b-aad1-4ef5-a321-f28f5ec089a6","Type":"ContainerDied","Data":"317a746b8b977276fd65fc4bb25b36da3033411d6c31d9f07e1fcc879f46bf0c"} Jan 23 08:54:17 crc kubenswrapper[4711]: I0123 08:54:17.889857 4711 scope.go:117] "RemoveContainer" containerID="317a746b8b977276fd65fc4bb25b36da3033411d6c31d9f07e1fcc879f46bf0c" Jan 23 08:54:17 crc kubenswrapper[4711]: I0123 08:54:17.928838 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d639ad2b-aad1-4ef5-a321-f28f5ec089a6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d639ad2b-aad1-4ef5-a321-f28f5ec089a6" (UID: "d639ad2b-aad1-4ef5-a321-f28f5ec089a6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:54:17 crc kubenswrapper[4711]: I0123 08:54:17.982345 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d639ad2b-aad1-4ef5-a321-f28f5ec089a6-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 23 08:54:18 crc kubenswrapper[4711]: I0123 08:54:18.428816 4711 scope.go:117] "RemoveContainer" containerID="476fb36f701f3542b2edba08dc788960730ae7a0b8cfece6bd42ffda2bee998e" Jan 23 08:54:18 crc kubenswrapper[4711]: I0123 08:54:18.461009 4711 scope.go:117] "RemoveContainer" containerID="6a11485508e592915ce6d3a424c726afbacaebf6bbb9bf57a45064a1736b65c4" Jan 23 08:54:18 crc kubenswrapper[4711]: I0123 08:54:18.901133 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-slxcc" Jan 23 08:54:18 crc kubenswrapper[4711]: I0123 08:54:18.901174 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-slxcc" event={"ID":"d639ad2b-aad1-4ef5-a321-f28f5ec089a6","Type":"ContainerDied","Data":"0f36557eb382b07b6b27c75c96e4fa0767da1a8b5acca8cf2e37d3b00356886a"} Jan 23 08:54:18 crc kubenswrapper[4711]: I0123 08:54:18.903211 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" event={"ID":"ff4efd5e-ae35-4d90-b767-7bfd505b441e","Type":"ContainerStarted","Data":"6f1050c5d7e25bde7828344a175e37b16857fd3ae64e168f0051991ae4c21486"} Jan 23 08:54:18 crc kubenswrapper[4711]: I0123 08:54:18.933886 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podStartSLOduration=5.933865086 podStartE2EDuration="5.933865086s" podCreationTimestamp="2026-01-23 08:54:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 08:54:18.923922603 +0000 UTC m=+2044.496878981" watchObservedRunningTime="2026-01-23 08:54:18.933865086 +0000 UTC m=+2044.506821464" Jan 23 08:54:18 crc kubenswrapper[4711]: I0123 08:54:18.957310 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-slxcc"] Jan 23 08:54:18 crc kubenswrapper[4711]: I0123 08:54:18.964738 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-slxcc"] Jan 23 08:54:19 crc kubenswrapper[4711]: I0123 08:54:19.485853 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d639ad2b-aad1-4ef5-a321-f28f5ec089a6" path="/var/lib/kubelet/pods/d639ad2b-aad1-4ef5-a321-f28f5ec089a6/volumes" Jan 23 08:54:20 crc kubenswrapper[4711]: I0123 08:54:20.662676 4711 scope.go:117] "RemoveContainer" containerID="1c562cec10b1575989444d35985de6eaedbd8ba83a34a7fa517b24c593936536" Jan 23 08:54:20 crc kubenswrapper[4711]: I0123 08:54:20.697316 4711 scope.go:117] "RemoveContainer" containerID="c37c6b568fa769199921cbfa931e492461b68d3680db861b37bf394111ef3576" Jan 23 08:54:20 crc kubenswrapper[4711]: I0123 08:54:20.729381 4711 scope.go:117] "RemoveContainer" containerID="5672a92c5e490320e6276ce3cfe62f369e52d799ddbfc4498b8560fa46f91832" Jan 23 08:54:20 crc kubenswrapper[4711]: I0123 08:54:20.784860 4711 scope.go:117] "RemoveContainer" containerID="6a93b5ad381ef5d0dcca27897e98d07b366a221c0a7579d7b8fc3b33527518af" Jan 23 08:54:20 crc kubenswrapper[4711]: I0123 08:54:20.813901 4711 scope.go:117] "RemoveContainer" containerID="36b86eab09dee22ec9f115bb2e075374a728b6174c014138f80a6c7598a4723a" Jan 23 08:54:20 crc kubenswrapper[4711]: I0123 08:54:20.841069 4711 scope.go:117] "RemoveContainer" containerID="0bd27b971d233e79b0d00816b739c3be6c8a2406f8152bf429ac3cd7a2e6edca" Jan 23 08:54:20 crc kubenswrapper[4711]: I0123 08:54:20.879250 4711 scope.go:117] "RemoveContainer" containerID="d5cc5ce4207b3c97d647aa53dbb8b465a7f3afd37fac958d4c49c3520124600b" Jan 23 08:54:20 crc kubenswrapper[4711]: I0123 08:54:20.898080 4711 scope.go:117] "RemoveContainer" containerID="840ab4013f76229bffb40357995d854bef2a196cc1e80c7ba9737b4a4a23a9c6" Jan 23 08:54:20 crc kubenswrapper[4711]: I0123 08:54:20.917572 4711 scope.go:117] "RemoveContainer" containerID="113b88d1f806ba6aa7c338ee91953eb99c9ea9560b0f4c6bfb3ff472e57d7221" Jan 23 08:54:22 crc kubenswrapper[4711]: 
I0123 08:54:22.951118 4711 generic.go:334] "Generic (PLEG): container finished" podID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerID="6f1050c5d7e25bde7828344a175e37b16857fd3ae64e168f0051991ae4c21486" exitCode=2 Jan 23 08:54:22 crc kubenswrapper[4711]: I0123 08:54:22.951193 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" event={"ID":"ff4efd5e-ae35-4d90-b767-7bfd505b441e","Type":"ContainerDied","Data":"6f1050c5d7e25bde7828344a175e37b16857fd3ae64e168f0051991ae4c21486"} Jan 23 08:54:22 crc kubenswrapper[4711]: I0123 08:54:22.951787 4711 scope.go:117] "RemoveContainer" containerID="6f1050c5d7e25bde7828344a175e37b16857fd3ae64e168f0051991ae4c21486" Jan 23 08:54:23 crc kubenswrapper[4711]: I0123 08:54:23.962795 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" event={"ID":"ff4efd5e-ae35-4d90-b767-7bfd505b441e","Type":"ContainerStarted","Data":"96e760b082242b6cdab1ba0f3907ad5af890c2e9981fa5ce4fd62ddb4a16ce71"} Jan 23 08:54:28 crc kubenswrapper[4711]: I0123 08:54:27.999631 4711 generic.go:334] "Generic (PLEG): container finished" podID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerID="96e760b082242b6cdab1ba0f3907ad5af890c2e9981fa5ce4fd62ddb4a16ce71" exitCode=2 Jan 23 08:54:28 crc kubenswrapper[4711]: I0123 08:54:27.999735 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" event={"ID":"ff4efd5e-ae35-4d90-b767-7bfd505b441e","Type":"ContainerDied","Data":"96e760b082242b6cdab1ba0f3907ad5af890c2e9981fa5ce4fd62ddb4a16ce71"} Jan 23 08:54:28 crc kubenswrapper[4711]: I0123 08:54:28.000293 4711 scope.go:117] "RemoveContainer" containerID="6f1050c5d7e25bde7828344a175e37b16857fd3ae64e168f0051991ae4c21486" Jan 23 08:54:28 crc kubenswrapper[4711]: I0123 08:54:28.000944 4711 scope.go:117] "RemoveContainer" containerID="96e760b082242b6cdab1ba0f3907ad5af890c2e9981fa5ce4fd62ddb4a16ce71" Jan 23 08:54:28 crc kubenswrapper[4711]: E0123 08:54:28.001241 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:54:39 crc kubenswrapper[4711]: I0123 08:54:39.474172 4711 scope.go:117] "RemoveContainer" containerID="96e760b082242b6cdab1ba0f3907ad5af890c2e9981fa5ce4fd62ddb4a16ce71" Jan 23 08:54:40 crc kubenswrapper[4711]: I0123 08:54:40.111553 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" event={"ID":"ff4efd5e-ae35-4d90-b767-7bfd505b441e","Type":"ContainerStarted","Data":"32af6ca0c5a97225323fe4e5a019bda1716920a434ebf0493627980a74fc9898"} Jan 23 08:54:45 crc kubenswrapper[4711]: I0123 08:54:45.165239 4711 generic.go:334] "Generic (PLEG): container finished" podID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerID="32af6ca0c5a97225323fe4e5a019bda1716920a434ebf0493627980a74fc9898" exitCode=2 Jan 23 08:54:45 crc kubenswrapper[4711]: I0123 08:54:45.165318 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" event={"ID":"ff4efd5e-ae35-4d90-b767-7bfd505b441e","Type":"ContainerDied","Data":"32af6ca0c5a97225323fe4e5a019bda1716920a434ebf0493627980a74fc9898"} Jan 23 
08:54:45 crc kubenswrapper[4711]: I0123 08:54:45.165599 4711 scope.go:117] "RemoveContainer" containerID="96e760b082242b6cdab1ba0f3907ad5af890c2e9981fa5ce4fd62ddb4a16ce71" Jan 23 08:54:45 crc kubenswrapper[4711]: I0123 08:54:45.166485 4711 scope.go:117] "RemoveContainer" containerID="32af6ca0c5a97225323fe4e5a019bda1716920a434ebf0493627980a74fc9898" Jan 23 08:54:45 crc kubenswrapper[4711]: E0123 08:54:45.166761 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 20s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:54:53 crc kubenswrapper[4711]: I0123 08:54:53.927447 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lk4d5"] Jan 23 08:54:53 crc kubenswrapper[4711]: E0123 08:54:53.928298 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d639ad2b-aad1-4ef5-a321-f28f5ec089a6" containerName="registry-server" Jan 23 08:54:53 crc kubenswrapper[4711]: I0123 08:54:53.928309 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d639ad2b-aad1-4ef5-a321-f28f5ec089a6" containerName="registry-server" Jan 23 08:54:53 crc kubenswrapper[4711]: E0123 08:54:53.928321 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d639ad2b-aad1-4ef5-a321-f28f5ec089a6" containerName="extract-content" Jan 23 08:54:53 crc kubenswrapper[4711]: I0123 08:54:53.928326 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d639ad2b-aad1-4ef5-a321-f28f5ec089a6" containerName="extract-content" Jan 23 08:54:53 crc kubenswrapper[4711]: E0123 08:54:53.928344 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d639ad2b-aad1-4ef5-a321-f28f5ec089a6" containerName="extract-utilities" Jan 23 08:54:53 crc kubenswrapper[4711]: I0123 08:54:53.928350 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="d639ad2b-aad1-4ef5-a321-f28f5ec089a6" containerName="extract-utilities" Jan 23 08:54:53 crc kubenswrapper[4711]: I0123 08:54:53.928497 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="d639ad2b-aad1-4ef5-a321-f28f5ec089a6" containerName="registry-server" Jan 23 08:54:53 crc kubenswrapper[4711]: I0123 08:54:53.929744 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lk4d5" Jan 23 08:54:53 crc kubenswrapper[4711]: I0123 08:54:53.938935 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lk4d5"] Jan 23 08:54:54 crc kubenswrapper[4711]: I0123 08:54:54.065467 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f07f84b-4db9-4a0d-942c-97c1f363d2d4-utilities\") pod \"redhat-marketplace-lk4d5\" (UID: \"6f07f84b-4db9-4a0d-942c-97c1f363d2d4\") " pod="openshift-marketplace/redhat-marketplace-lk4d5" Jan 23 08:54:54 crc kubenswrapper[4711]: I0123 08:54:54.066079 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsmjp\" (UniqueName: \"kubernetes.io/projected/6f07f84b-4db9-4a0d-942c-97c1f363d2d4-kube-api-access-vsmjp\") pod \"redhat-marketplace-lk4d5\" (UID: \"6f07f84b-4db9-4a0d-942c-97c1f363d2d4\") " pod="openshift-marketplace/redhat-marketplace-lk4d5" Jan 23 08:54:54 crc kubenswrapper[4711]: I0123 08:54:54.066162 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f07f84b-4db9-4a0d-942c-97c1f363d2d4-catalog-content\") pod \"redhat-marketplace-lk4d5\" (UID: \"6f07f84b-4db9-4a0d-942c-97c1f363d2d4\") " pod="openshift-marketplace/redhat-marketplace-lk4d5" Jan 23 08:54:54 crc kubenswrapper[4711]: I0123 08:54:54.168080 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsmjp\" (UniqueName: \"kubernetes.io/projected/6f07f84b-4db9-4a0d-942c-97c1f363d2d4-kube-api-access-vsmjp\") pod \"redhat-marketplace-lk4d5\" (UID: \"6f07f84b-4db9-4a0d-942c-97c1f363d2d4\") " pod="openshift-marketplace/redhat-marketplace-lk4d5" Jan 23 08:54:54 crc kubenswrapper[4711]: I0123 08:54:54.168160 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f07f84b-4db9-4a0d-942c-97c1f363d2d4-catalog-content\") pod \"redhat-marketplace-lk4d5\" (UID: \"6f07f84b-4db9-4a0d-942c-97c1f363d2d4\") " pod="openshift-marketplace/redhat-marketplace-lk4d5" Jan 23 08:54:54 crc kubenswrapper[4711]: I0123 08:54:54.168188 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f07f84b-4db9-4a0d-942c-97c1f363d2d4-utilities\") pod \"redhat-marketplace-lk4d5\" (UID: \"6f07f84b-4db9-4a0d-942c-97c1f363d2d4\") " pod="openshift-marketplace/redhat-marketplace-lk4d5" Jan 23 08:54:54 crc kubenswrapper[4711]: I0123 08:54:54.168596 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f07f84b-4db9-4a0d-942c-97c1f363d2d4-catalog-content\") pod \"redhat-marketplace-lk4d5\" (UID: \"6f07f84b-4db9-4a0d-942c-97c1f363d2d4\") " pod="openshift-marketplace/redhat-marketplace-lk4d5" Jan 23 08:54:54 crc kubenswrapper[4711]: I0123 08:54:54.168811 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f07f84b-4db9-4a0d-942c-97c1f363d2d4-utilities\") pod \"redhat-marketplace-lk4d5\" (UID: \"6f07f84b-4db9-4a0d-942c-97c1f363d2d4\") " pod="openshift-marketplace/redhat-marketplace-lk4d5" Jan 23 08:54:54 crc kubenswrapper[4711]: I0123 08:54:54.188103 4711 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-vsmjp\" (UniqueName: \"kubernetes.io/projected/6f07f84b-4db9-4a0d-942c-97c1f363d2d4-kube-api-access-vsmjp\") pod \"redhat-marketplace-lk4d5\" (UID: \"6f07f84b-4db9-4a0d-942c-97c1f363d2d4\") " pod="openshift-marketplace/redhat-marketplace-lk4d5" Jan 23 08:54:54 crc kubenswrapper[4711]: I0123 08:54:54.254447 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lk4d5" Jan 23 08:54:54 crc kubenswrapper[4711]: I0123 08:54:54.729236 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lk4d5"] Jan 23 08:54:55 crc kubenswrapper[4711]: I0123 08:54:55.247827 4711 generic.go:334] "Generic (PLEG): container finished" podID="6f07f84b-4db9-4a0d-942c-97c1f363d2d4" containerID="17522e09a2829329f159bbe5f4b368f276b0fba986de7bcf4963434bc86c2530" exitCode=0 Jan 23 08:54:55 crc kubenswrapper[4711]: I0123 08:54:55.247922 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lk4d5" event={"ID":"6f07f84b-4db9-4a0d-942c-97c1f363d2d4","Type":"ContainerDied","Data":"17522e09a2829329f159bbe5f4b368f276b0fba986de7bcf4963434bc86c2530"} Jan 23 08:54:55 crc kubenswrapper[4711]: I0123 08:54:55.248147 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lk4d5" event={"ID":"6f07f84b-4db9-4a0d-942c-97c1f363d2d4","Type":"ContainerStarted","Data":"1d81876ecce7c83f69b0732f377b5620a19ebba370a7fe7a5ab15f6df5dbcbc0"} Jan 23 08:54:56 crc kubenswrapper[4711]: I0123 08:54:56.262061 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lk4d5" event={"ID":"6f07f84b-4db9-4a0d-942c-97c1f363d2d4","Type":"ContainerStarted","Data":"4b76b24451fcc8fee281aaee83e5bab4edaab8a2bb7c1e8f4dbd48ca9a46d412"} Jan 23 08:54:57 crc kubenswrapper[4711]: I0123 08:54:57.272348 4711 generic.go:334] "Generic (PLEG): container finished" podID="6f07f84b-4db9-4a0d-942c-97c1f363d2d4" containerID="4b76b24451fcc8fee281aaee83e5bab4edaab8a2bb7c1e8f4dbd48ca9a46d412" exitCode=0 Jan 23 08:54:57 crc kubenswrapper[4711]: I0123 08:54:57.272387 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lk4d5" event={"ID":"6f07f84b-4db9-4a0d-942c-97c1f363d2d4","Type":"ContainerDied","Data":"4b76b24451fcc8fee281aaee83e5bab4edaab8a2bb7c1e8f4dbd48ca9a46d412"} Jan 23 08:54:58 crc kubenswrapper[4711]: I0123 08:54:58.474155 4711 scope.go:117] "RemoveContainer" containerID="32af6ca0c5a97225323fe4e5a019bda1716920a434ebf0493627980a74fc9898" Jan 23 08:54:58 crc kubenswrapper[4711]: E0123 08:54:58.474564 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 20s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:55:00 crc kubenswrapper[4711]: I0123 08:55:00.300415 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lk4d5" event={"ID":"6f07f84b-4db9-4a0d-942c-97c1f363d2d4","Type":"ContainerStarted","Data":"7246afe217849986c43914c1dae658df4f871e8defa53e0df6462125dd1b56a1"} Jan 23 08:55:00 crc kubenswrapper[4711]: I0123 08:55:00.325778 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-marketplace-lk4d5" podStartSLOduration=3.7540890620000003 podStartE2EDuration="7.325761655s" podCreationTimestamp="2026-01-23 08:54:53 +0000 UTC" firstStartedPulling="2026-01-23 08:54:55.249295497 +0000 UTC m=+2080.822251865" lastFinishedPulling="2026-01-23 08:54:58.82096809 +0000 UTC m=+2084.393924458" observedRunningTime="2026-01-23 08:55:00.319270615 +0000 UTC m=+2085.892226973" watchObservedRunningTime="2026-01-23 08:55:00.325761655 +0000 UTC m=+2085.898718023" Jan 23 08:55:04 crc kubenswrapper[4711]: I0123 08:55:04.255055 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lk4d5" Jan 23 08:55:04 crc kubenswrapper[4711]: I0123 08:55:04.255152 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lk4d5" Jan 23 08:55:04 crc kubenswrapper[4711]: I0123 08:55:04.304032 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lk4d5" Jan 23 08:55:04 crc kubenswrapper[4711]: I0123 08:55:04.371390 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lk4d5" Jan 23 08:55:04 crc kubenswrapper[4711]: I0123 08:55:04.533496 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lk4d5"] Jan 23 08:55:06 crc kubenswrapper[4711]: I0123 08:55:06.341311 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lk4d5" podUID="6f07f84b-4db9-4a0d-942c-97c1f363d2d4" containerName="registry-server" containerID="cri-o://7246afe217849986c43914c1dae658df4f871e8defa53e0df6462125dd1b56a1" gracePeriod=2 Jan 23 08:55:06 crc kubenswrapper[4711]: I0123 08:55:06.765886 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lk4d5" Jan 23 08:55:06 crc kubenswrapper[4711]: I0123 08:55:06.866228 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f07f84b-4db9-4a0d-942c-97c1f363d2d4-catalog-content\") pod \"6f07f84b-4db9-4a0d-942c-97c1f363d2d4\" (UID: \"6f07f84b-4db9-4a0d-942c-97c1f363d2d4\") " Jan 23 08:55:06 crc kubenswrapper[4711]: I0123 08:55:06.866442 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f07f84b-4db9-4a0d-942c-97c1f363d2d4-utilities\") pod \"6f07f84b-4db9-4a0d-942c-97c1f363d2d4\" (UID: \"6f07f84b-4db9-4a0d-942c-97c1f363d2d4\") " Jan 23 08:55:06 crc kubenswrapper[4711]: I0123 08:55:06.866484 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vsmjp\" (UniqueName: \"kubernetes.io/projected/6f07f84b-4db9-4a0d-942c-97c1f363d2d4-kube-api-access-vsmjp\") pod \"6f07f84b-4db9-4a0d-942c-97c1f363d2d4\" (UID: \"6f07f84b-4db9-4a0d-942c-97c1f363d2d4\") " Jan 23 08:55:06 crc kubenswrapper[4711]: I0123 08:55:06.867260 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f07f84b-4db9-4a0d-942c-97c1f363d2d4-utilities" (OuterVolumeSpecName: "utilities") pod "6f07f84b-4db9-4a0d-942c-97c1f363d2d4" (UID: "6f07f84b-4db9-4a0d-942c-97c1f363d2d4"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:55:06 crc kubenswrapper[4711]: I0123 08:55:06.872013 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f07f84b-4db9-4a0d-942c-97c1f363d2d4-kube-api-access-vsmjp" (OuterVolumeSpecName: "kube-api-access-vsmjp") pod "6f07f84b-4db9-4a0d-942c-97c1f363d2d4" (UID: "6f07f84b-4db9-4a0d-942c-97c1f363d2d4"). InnerVolumeSpecName "kube-api-access-vsmjp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:55:06 crc kubenswrapper[4711]: I0123 08:55:06.892786 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f07f84b-4db9-4a0d-942c-97c1f363d2d4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6f07f84b-4db9-4a0d-942c-97c1f363d2d4" (UID: "6f07f84b-4db9-4a0d-942c-97c1f363d2d4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:55:06 crc kubenswrapper[4711]: I0123 08:55:06.968529 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f07f84b-4db9-4a0d-942c-97c1f363d2d4-utilities\") on node \"crc\" DevicePath \"\"" Jan 23 08:55:06 crc kubenswrapper[4711]: I0123 08:55:06.968569 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vsmjp\" (UniqueName: \"kubernetes.io/projected/6f07f84b-4db9-4a0d-942c-97c1f363d2d4-kube-api-access-vsmjp\") on node \"crc\" DevicePath \"\"" Jan 23 08:55:06 crc kubenswrapper[4711]: I0123 08:55:06.968581 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f07f84b-4db9-4a0d-942c-97c1f363d2d4-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 23 08:55:07 crc kubenswrapper[4711]: I0123 08:55:07.353676 4711 generic.go:334] "Generic (PLEG): container finished" podID="6f07f84b-4db9-4a0d-942c-97c1f363d2d4" containerID="7246afe217849986c43914c1dae658df4f871e8defa53e0df6462125dd1b56a1" exitCode=0 Jan 23 08:55:07 crc kubenswrapper[4711]: I0123 08:55:07.353719 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lk4d5" event={"ID":"6f07f84b-4db9-4a0d-942c-97c1f363d2d4","Type":"ContainerDied","Data":"7246afe217849986c43914c1dae658df4f871e8defa53e0df6462125dd1b56a1"} Jan 23 08:55:07 crc kubenswrapper[4711]: I0123 08:55:07.353746 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lk4d5" event={"ID":"6f07f84b-4db9-4a0d-942c-97c1f363d2d4","Type":"ContainerDied","Data":"1d81876ecce7c83f69b0732f377b5620a19ebba370a7fe7a5ab15f6df5dbcbc0"} Jan 23 08:55:07 crc kubenswrapper[4711]: I0123 08:55:07.353754 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lk4d5" Jan 23 08:55:07 crc kubenswrapper[4711]: I0123 08:55:07.353762 4711 scope.go:117] "RemoveContainer" containerID="7246afe217849986c43914c1dae658df4f871e8defa53e0df6462125dd1b56a1" Jan 23 08:55:07 crc kubenswrapper[4711]: I0123 08:55:07.377767 4711 scope.go:117] "RemoveContainer" containerID="4b76b24451fcc8fee281aaee83e5bab4edaab8a2bb7c1e8f4dbd48ca9a46d412" Jan 23 08:55:07 crc kubenswrapper[4711]: I0123 08:55:07.395892 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lk4d5"] Jan 23 08:55:07 crc kubenswrapper[4711]: I0123 08:55:07.399228 4711 scope.go:117] "RemoveContainer" containerID="17522e09a2829329f159bbe5f4b368f276b0fba986de7bcf4963434bc86c2530" Jan 23 08:55:07 crc kubenswrapper[4711]: I0123 08:55:07.404217 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lk4d5"] Jan 23 08:55:07 crc kubenswrapper[4711]: I0123 08:55:07.441731 4711 scope.go:117] "RemoveContainer" containerID="7246afe217849986c43914c1dae658df4f871e8defa53e0df6462125dd1b56a1" Jan 23 08:55:07 crc kubenswrapper[4711]: E0123 08:55:07.442289 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7246afe217849986c43914c1dae658df4f871e8defa53e0df6462125dd1b56a1\": container with ID starting with 7246afe217849986c43914c1dae658df4f871e8defa53e0df6462125dd1b56a1 not found: ID does not exist" containerID="7246afe217849986c43914c1dae658df4f871e8defa53e0df6462125dd1b56a1" Jan 23 08:55:07 crc kubenswrapper[4711]: I0123 08:55:07.442335 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7246afe217849986c43914c1dae658df4f871e8defa53e0df6462125dd1b56a1"} err="failed to get container status \"7246afe217849986c43914c1dae658df4f871e8defa53e0df6462125dd1b56a1\": rpc error: code = NotFound desc = could not find container \"7246afe217849986c43914c1dae658df4f871e8defa53e0df6462125dd1b56a1\": container with ID starting with 7246afe217849986c43914c1dae658df4f871e8defa53e0df6462125dd1b56a1 not found: ID does not exist" Jan 23 08:55:07 crc kubenswrapper[4711]: I0123 08:55:07.442364 4711 scope.go:117] "RemoveContainer" containerID="4b76b24451fcc8fee281aaee83e5bab4edaab8a2bb7c1e8f4dbd48ca9a46d412" Jan 23 08:55:07 crc kubenswrapper[4711]: E0123 08:55:07.443292 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b76b24451fcc8fee281aaee83e5bab4edaab8a2bb7c1e8f4dbd48ca9a46d412\": container with ID starting with 4b76b24451fcc8fee281aaee83e5bab4edaab8a2bb7c1e8f4dbd48ca9a46d412 not found: ID does not exist" containerID="4b76b24451fcc8fee281aaee83e5bab4edaab8a2bb7c1e8f4dbd48ca9a46d412" Jan 23 08:55:07 crc kubenswrapper[4711]: I0123 08:55:07.443351 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b76b24451fcc8fee281aaee83e5bab4edaab8a2bb7c1e8f4dbd48ca9a46d412"} err="failed to get container status \"4b76b24451fcc8fee281aaee83e5bab4edaab8a2bb7c1e8f4dbd48ca9a46d412\": rpc error: code = NotFound desc = could not find container \"4b76b24451fcc8fee281aaee83e5bab4edaab8a2bb7c1e8f4dbd48ca9a46d412\": container with ID starting with 4b76b24451fcc8fee281aaee83e5bab4edaab8a2bb7c1e8f4dbd48ca9a46d412 not found: ID does not exist" Jan 23 08:55:07 crc kubenswrapper[4711]: I0123 08:55:07.443389 4711 scope.go:117] "RemoveContainer" 
containerID="17522e09a2829329f159bbe5f4b368f276b0fba986de7bcf4963434bc86c2530" Jan 23 08:55:07 crc kubenswrapper[4711]: E0123 08:55:07.443877 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17522e09a2829329f159bbe5f4b368f276b0fba986de7bcf4963434bc86c2530\": container with ID starting with 17522e09a2829329f159bbe5f4b368f276b0fba986de7bcf4963434bc86c2530 not found: ID does not exist" containerID="17522e09a2829329f159bbe5f4b368f276b0fba986de7bcf4963434bc86c2530" Jan 23 08:55:07 crc kubenswrapper[4711]: I0123 08:55:07.444300 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17522e09a2829329f159bbe5f4b368f276b0fba986de7bcf4963434bc86c2530"} err="failed to get container status \"17522e09a2829329f159bbe5f4b368f276b0fba986de7bcf4963434bc86c2530\": rpc error: code = NotFound desc = could not find container \"17522e09a2829329f159bbe5f4b368f276b0fba986de7bcf4963434bc86c2530\": container with ID starting with 17522e09a2829329f159bbe5f4b368f276b0fba986de7bcf4963434bc86c2530 not found: ID does not exist" Jan 23 08:55:07 crc kubenswrapper[4711]: I0123 08:55:07.485891 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f07f84b-4db9-4a0d-942c-97c1f363d2d4" path="/var/lib/kubelet/pods/6f07f84b-4db9-4a0d-942c-97c1f363d2d4/volumes" Jan 23 08:55:11 crc kubenswrapper[4711]: I0123 08:55:11.474461 4711 scope.go:117] "RemoveContainer" containerID="32af6ca0c5a97225323fe4e5a019bda1716920a434ebf0493627980a74fc9898" Jan 23 08:55:12 crc kubenswrapper[4711]: I0123 08:55:12.404615 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" event={"ID":"ff4efd5e-ae35-4d90-b767-7bfd505b441e","Type":"ContainerStarted","Data":"8a5ff92ed12112776a4b8f8721a43e80e6919246debf329b16b3cc52eb64bac3"} Jan 23 08:55:17 crc kubenswrapper[4711]: I0123 08:55:17.446817 4711 generic.go:334] "Generic (PLEG): container finished" podID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerID="8a5ff92ed12112776a4b8f8721a43e80e6919246debf329b16b3cc52eb64bac3" exitCode=2 Jan 23 08:55:17 crc kubenswrapper[4711]: I0123 08:55:17.446906 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" event={"ID":"ff4efd5e-ae35-4d90-b767-7bfd505b441e","Type":"ContainerDied","Data":"8a5ff92ed12112776a4b8f8721a43e80e6919246debf329b16b3cc52eb64bac3"} Jan 23 08:55:17 crc kubenswrapper[4711]: I0123 08:55:17.447592 4711 scope.go:117] "RemoveContainer" containerID="32af6ca0c5a97225323fe4e5a019bda1716920a434ebf0493627980a74fc9898" Jan 23 08:55:17 crc kubenswrapper[4711]: I0123 08:55:17.448047 4711 scope.go:117] "RemoveContainer" containerID="8a5ff92ed12112776a4b8f8721a43e80e6919246debf329b16b3cc52eb64bac3" Jan 23 08:55:17 crc kubenswrapper[4711]: E0123 08:55:17.448236 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 40s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:55:21 crc kubenswrapper[4711]: I0123 08:55:21.325526 4711 scope.go:117] "RemoveContainer" containerID="328adb3acdaa5d43f27cdee12f97e8a5b4e75490b7ace520bf4af4f7ff02e4c9" Jan 23 08:55:21 crc kubenswrapper[4711]: I0123 08:55:21.344458 4711 scope.go:117] 
"RemoveContainer" containerID="992d2b13977571fdabd23e5d781f6a965f4e03753fbc32cc44cdfeea87a51504" Jan 23 08:55:21 crc kubenswrapper[4711]: I0123 08:55:21.375391 4711 scope.go:117] "RemoveContainer" containerID="0df3a2a8b47c6371146331aae739f5f00207a75c22cb222c62519a7c798331d4" Jan 23 08:55:21 crc kubenswrapper[4711]: I0123 08:55:21.427399 4711 scope.go:117] "RemoveContainer" containerID="80771f604e4ec6f268a0dbb6a11e67f206a15dfaeea948a0116c17f0dc4f7f7a" Jan 23 08:55:21 crc kubenswrapper[4711]: I0123 08:55:21.463973 4711 scope.go:117] "RemoveContainer" containerID="c8d3881e274a6b070aff46f00b3cd110c1acaee33c0d643828bcc7126b8f51eb" Jan 23 08:55:28 crc kubenswrapper[4711]: I0123 08:55:28.473925 4711 scope.go:117] "RemoveContainer" containerID="8a5ff92ed12112776a4b8f8721a43e80e6919246debf329b16b3cc52eb64bac3" Jan 23 08:55:28 crc kubenswrapper[4711]: E0123 08:55:28.474574 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 40s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:55:39 crc kubenswrapper[4711]: I0123 08:55:39.473745 4711 scope.go:117] "RemoveContainer" containerID="8a5ff92ed12112776a4b8f8721a43e80e6919246debf329b16b3cc52eb64bac3" Jan 23 08:55:39 crc kubenswrapper[4711]: E0123 08:55:39.474566 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 40s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:55:54 crc kubenswrapper[4711]: I0123 08:55:54.474055 4711 scope.go:117] "RemoveContainer" containerID="8a5ff92ed12112776a4b8f8721a43e80e6919246debf329b16b3cc52eb64bac3" Jan 23 08:55:54 crc kubenswrapper[4711]: E0123 08:55:54.475198 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 40s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:55:55 crc kubenswrapper[4711]: I0123 08:55:55.993367 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:55:55 crc kubenswrapper[4711]: I0123 08:55:55.993841 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:56:06 crc kubenswrapper[4711]: I0123 08:56:06.474869 4711 scope.go:117] "RemoveContainer" containerID="8a5ff92ed12112776a4b8f8721a43e80e6919246debf329b16b3cc52eb64bac3" Jan 23 08:56:06 crc kubenswrapper[4711]: I0123 08:56:06.845284 4711 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" event={"ID":"ff4efd5e-ae35-4d90-b767-7bfd505b441e","Type":"ContainerStarted","Data":"fdbb106a3879f7516c0b2dbed69bf60ac89683810be832495738fdbf1c5c3492"} Jan 23 08:56:11 crc kubenswrapper[4711]: I0123 08:56:11.898217 4711 generic.go:334] "Generic (PLEG): container finished" podID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerID="fdbb106a3879f7516c0b2dbed69bf60ac89683810be832495738fdbf1c5c3492" exitCode=2 Jan 23 08:56:11 crc kubenswrapper[4711]: I0123 08:56:11.898263 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" event={"ID":"ff4efd5e-ae35-4d90-b767-7bfd505b441e","Type":"ContainerDied","Data":"fdbb106a3879f7516c0b2dbed69bf60ac89683810be832495738fdbf1c5c3492"} Jan 23 08:56:11 crc kubenswrapper[4711]: I0123 08:56:11.899081 4711 scope.go:117] "RemoveContainer" containerID="8a5ff92ed12112776a4b8f8721a43e80e6919246debf329b16b3cc52eb64bac3" Jan 23 08:56:11 crc kubenswrapper[4711]: I0123 08:56:11.899699 4711 scope.go:117] "RemoveContainer" containerID="fdbb106a3879f7516c0b2dbed69bf60ac89683810be832495738fdbf1c5c3492" Jan 23 08:56:11 crc kubenswrapper[4711]: E0123 08:56:11.899948 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:56:21 crc kubenswrapper[4711]: I0123 08:56:21.597063 4711 scope.go:117] "RemoveContainer" containerID="c4caecb3a4094896b4afdab10ba38a00b1a5b757ee146e1bc76b917c287788dd" Jan 23 08:56:21 crc kubenswrapper[4711]: I0123 08:56:21.622632 4711 scope.go:117] "RemoveContainer" containerID="06dc3dccdf9391925cc6b2e757a2102f7ac109f205eeeb4f3a00541e99be56e5" Jan 23 08:56:25 crc kubenswrapper[4711]: I0123 08:56:25.994198 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:56:25 crc kubenswrapper[4711]: I0123 08:56:25.994945 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:56:26 crc kubenswrapper[4711]: I0123 08:56:26.473684 4711 scope.go:117] "RemoveContainer" containerID="fdbb106a3879f7516c0b2dbed69bf60ac89683810be832495738fdbf1c5c3492" Jan 23 08:56:26 crc kubenswrapper[4711]: E0123 08:56:26.473911 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:56:38 crc kubenswrapper[4711]: I0123 08:56:38.474124 4711 scope.go:117] "RemoveContainer" 
containerID="fdbb106a3879f7516c0b2dbed69bf60ac89683810be832495738fdbf1c5c3492" Jan 23 08:56:38 crc kubenswrapper[4711]: E0123 08:56:38.474862 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:56:52 crc kubenswrapper[4711]: I0123 08:56:52.474418 4711 scope.go:117] "RemoveContainer" containerID="fdbb106a3879f7516c0b2dbed69bf60ac89683810be832495738fdbf1c5c3492" Jan 23 08:56:52 crc kubenswrapper[4711]: E0123 08:56:52.475497 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:56:55 crc kubenswrapper[4711]: I0123 08:56:55.993758 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:56:55 crc kubenswrapper[4711]: I0123 08:56:55.994412 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:56:55 crc kubenswrapper[4711]: I0123 08:56:55.994474 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 08:56:55 crc kubenswrapper[4711]: I0123 08:56:55.995589 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6fc3e292f98695a914f968c4697ab5b4100d4aad931a282b95317fbc924708bc"} pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 23 08:56:55 crc kubenswrapper[4711]: I0123 08:56:55.995698 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" containerID="cri-o://6fc3e292f98695a914f968c4697ab5b4100d4aad931a282b95317fbc924708bc" gracePeriod=600 Jan 23 08:56:58 crc kubenswrapper[4711]: I0123 08:56:58.277358 4711 generic.go:334] "Generic (PLEG): container finished" podID="3846d4e0-cfda-4e0b-8747-85267de12736" containerID="6fc3e292f98695a914f968c4697ab5b4100d4aad931a282b95317fbc924708bc" exitCode=0 Jan 23 08:56:58 crc kubenswrapper[4711]: I0123 08:56:58.277458 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerDied","Data":"6fc3e292f98695a914f968c4697ab5b4100d4aad931a282b95317fbc924708bc"} Jan 23 
08:56:58 crc kubenswrapper[4711]: I0123 08:56:58.278060 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerStarted","Data":"19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7"} Jan 23 08:56:58 crc kubenswrapper[4711]: I0123 08:56:58.278082 4711 scope.go:117] "RemoveContainer" containerID="3594ecf06a2b02f092e55ade883bc32152525c98ad4d56098421388532f7d058" Jan 23 08:57:03 crc kubenswrapper[4711]: I0123 08:57:03.473576 4711 scope.go:117] "RemoveContainer" containerID="fdbb106a3879f7516c0b2dbed69bf60ac89683810be832495738fdbf1c5c3492" Jan 23 08:57:03 crc kubenswrapper[4711]: E0123 08:57:03.474231 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:57:15 crc kubenswrapper[4711]: I0123 08:57:15.483420 4711 scope.go:117] "RemoveContainer" containerID="fdbb106a3879f7516c0b2dbed69bf60ac89683810be832495738fdbf1c5c3492" Jan 23 08:57:15 crc kubenswrapper[4711]: E0123 08:57:15.484304 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:57:21 crc kubenswrapper[4711]: I0123 08:57:21.675064 4711 scope.go:117] "RemoveContainer" containerID="132dc4a43ff0db0ada9c91fe753f2d6ea8feb1e71e7e83749302512c0aed2088" Jan 23 08:57:21 crc kubenswrapper[4711]: I0123 08:57:21.709495 4711 scope.go:117] "RemoveContainer" containerID="eab49d4e3f0f5e8226ac520556eac418d99a675651681e9df655b1941e49bd9f" Jan 23 08:57:21 crc kubenswrapper[4711]: I0123 08:57:21.738890 4711 scope.go:117] "RemoveContainer" containerID="fea0ea333621314e8420200032d19561a341c6c861e4c0491d449bc245c0d205" Jan 23 08:57:21 crc kubenswrapper[4711]: I0123 08:57:21.787901 4711 scope.go:117] "RemoveContainer" containerID="ce121608a029f32c4a261f158c9adaae9d5f539921c67dd4bfa9695c380b671f" Jan 23 08:57:21 crc kubenswrapper[4711]: I0123 08:57:21.826848 4711 scope.go:117] "RemoveContainer" containerID="24524cd01531660f7627b1a911fe4db6cf5ff2f95a98b4dbbf0b748ad7d02881" Jan 23 08:57:21 crc kubenswrapper[4711]: I0123 08:57:21.851424 4711 scope.go:117] "RemoveContainer" containerID="5acdb3f73cbb6aebc6a3336c9a3529d7810dd980a5e9c497b46bf78dc0e031d3" Jan 23 08:57:21 crc kubenswrapper[4711]: I0123 08:57:21.896361 4711 scope.go:117] "RemoveContainer" containerID="1b9881e04c1afb44f8fb96c730e8e3c28cea7873e8d2e8ebba63850ad2c6e196" Jan 23 08:57:21 crc kubenswrapper[4711]: I0123 08:57:21.918142 4711 scope.go:117] "RemoveContainer" containerID="4789abcb6a8aa4883a0288c88291cf6c65f97469836b55b3d1ebb336c642f1d4" Jan 23 08:57:21 crc kubenswrapper[4711]: I0123 08:57:21.957581 4711 scope.go:117] "RemoveContainer" containerID="b5751fa7c1a99d51f8231ae4a46c96dc07890862caaa454c056e82d748719d2c" Jan 23 08:57:21 crc kubenswrapper[4711]: I0123 08:57:21.982972 4711 scope.go:117] "RemoveContainer" 
containerID="7ac63bbf095552435a63a8a9ba9f83945143d4b9d4ade93f66b8344fc15ce540" Jan 23 08:57:22 crc kubenswrapper[4711]: I0123 08:57:22.002685 4711 scope.go:117] "RemoveContainer" containerID="bbc643566238ed209a4d4f3b85579b2657ee807144b1f4756f667fd094dcd8e7" Jan 23 08:57:22 crc kubenswrapper[4711]: I0123 08:57:22.018238 4711 scope.go:117] "RemoveContainer" containerID="64c0c38b4bd5bf4a3a16cf025f8cc0860572e829c7f231d05f68f05240fb936c" Jan 23 08:57:22 crc kubenswrapper[4711]: I0123 08:57:22.054375 4711 scope.go:117] "RemoveContainer" containerID="0932d432ac510c8282e41859ec4057cc7efae1fe7b9f4dc95e818628e6c9934d" Jan 23 08:57:26 crc kubenswrapper[4711]: I0123 08:57:26.474032 4711 scope.go:117] "RemoveContainer" containerID="fdbb106a3879f7516c0b2dbed69bf60ac89683810be832495738fdbf1c5c3492" Jan 23 08:57:26 crc kubenswrapper[4711]: E0123 08:57:26.474669 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:57:40 crc kubenswrapper[4711]: I0123 08:57:40.474372 4711 scope.go:117] "RemoveContainer" containerID="fdbb106a3879f7516c0b2dbed69bf60ac89683810be832495738fdbf1c5c3492" Jan 23 08:57:41 crc kubenswrapper[4711]: I0123 08:57:41.666914 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" event={"ID":"ff4efd5e-ae35-4d90-b767-7bfd505b441e","Type":"ContainerStarted","Data":"554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52"} Jan 23 08:57:45 crc kubenswrapper[4711]: I0123 08:57:45.701582 4711 generic.go:334] "Generic (PLEG): container finished" podID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerID="554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52" exitCode=2 Jan 23 08:57:45 crc kubenswrapper[4711]: I0123 08:57:45.701676 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" event={"ID":"ff4efd5e-ae35-4d90-b767-7bfd505b441e","Type":"ContainerDied","Data":"554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52"} Jan 23 08:57:45 crc kubenswrapper[4711]: I0123 08:57:45.702141 4711 scope.go:117] "RemoveContainer" containerID="fdbb106a3879f7516c0b2dbed69bf60ac89683810be832495738fdbf1c5c3492" Jan 23 08:57:45 crc kubenswrapper[4711]: I0123 08:57:45.702616 4711 scope.go:117] "RemoveContainer" containerID="554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52" Jan 23 08:57:45 crc kubenswrapper[4711]: E0123 08:57:45.702818 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:57:49 crc kubenswrapper[4711]: I0123 08:57:49.088818 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-w75rd"] Jan 23 08:57:49 crc kubenswrapper[4711]: E0123 08:57:49.089621 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f07f84b-4db9-4a0d-942c-97c1f363d2d4" containerName="extract-content" Jan 23 08:57:49 
Jan 23 08:57:49 crc kubenswrapper[4711]: E0123 08:57:49.089659 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f07f84b-4db9-4a0d-942c-97c1f363d2d4" containerName="extract-utilities"
Jan 23 08:57:49 crc kubenswrapper[4711]: I0123 08:57:49.089668 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f07f84b-4db9-4a0d-942c-97c1f363d2d4" containerName="extract-utilities"
Jan 23 08:57:49 crc kubenswrapper[4711]: E0123 08:57:49.089681 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f07f84b-4db9-4a0d-942c-97c1f363d2d4" containerName="registry-server"
Jan 23 08:57:49 crc kubenswrapper[4711]: I0123 08:57:49.089691 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f07f84b-4db9-4a0d-942c-97c1f363d2d4" containerName="registry-server"
Jan 23 08:57:49 crc kubenswrapper[4711]: I0123 08:57:49.089923 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f07f84b-4db9-4a0d-942c-97c1f363d2d4" containerName="registry-server"
Jan 23 08:57:49 crc kubenswrapper[4711]: I0123 08:57:49.091475 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w75rd"
Jan 23 08:57:49 crc kubenswrapper[4711]: I0123 08:57:49.099647 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-w75rd"]
Jan 23 08:57:49 crc kubenswrapper[4711]: I0123 08:57:49.226761 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257-catalog-content\") pod \"community-operators-w75rd\" (UID: \"9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257\") " pod="openshift-marketplace/community-operators-w75rd"
Jan 23 08:57:49 crc kubenswrapper[4711]: I0123 08:57:49.226877 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257-utilities\") pod \"community-operators-w75rd\" (UID: \"9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257\") " pod="openshift-marketplace/community-operators-w75rd"
Jan 23 08:57:49 crc kubenswrapper[4711]: I0123 08:57:49.227108 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qttkp\" (UniqueName: \"kubernetes.io/projected/9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257-kube-api-access-qttkp\") pod \"community-operators-w75rd\" (UID: \"9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257\") " pod="openshift-marketplace/community-operators-w75rd"
Jan 23 08:57:49 crc kubenswrapper[4711]: I0123 08:57:49.328820 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qttkp\" (UniqueName: \"kubernetes.io/projected/9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257-kube-api-access-qttkp\") pod \"community-operators-w75rd\" (UID: \"9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257\") " pod="openshift-marketplace/community-operators-w75rd"
Jan 23 08:57:49 crc kubenswrapper[4711]: I0123 08:57:49.328942 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257-catalog-content\") pod \"community-operators-w75rd\" (UID: \"9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257\") " pod="openshift-marketplace/community-operators-w75rd"
pod="openshift-marketplace/community-operators-w75rd" Jan 23 08:57:49 crc kubenswrapper[4711]: I0123 08:57:49.328977 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257-utilities\") pod \"community-operators-w75rd\" (UID: \"9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257\") " pod="openshift-marketplace/community-operators-w75rd" Jan 23 08:57:49 crc kubenswrapper[4711]: I0123 08:57:49.329417 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257-utilities\") pod \"community-operators-w75rd\" (UID: \"9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257\") " pod="openshift-marketplace/community-operators-w75rd" Jan 23 08:57:49 crc kubenswrapper[4711]: I0123 08:57:49.329715 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257-catalog-content\") pod \"community-operators-w75rd\" (UID: \"9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257\") " pod="openshift-marketplace/community-operators-w75rd" Jan 23 08:57:49 crc kubenswrapper[4711]: I0123 08:57:49.348689 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qttkp\" (UniqueName: \"kubernetes.io/projected/9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257-kube-api-access-qttkp\") pod \"community-operators-w75rd\" (UID: \"9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257\") " pod="openshift-marketplace/community-operators-w75rd" Jan 23 08:57:49 crc kubenswrapper[4711]: I0123 08:57:49.410815 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w75rd" Jan 23 08:57:49 crc kubenswrapper[4711]: I0123 08:57:49.971032 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-w75rd"] Jan 23 08:57:50 crc kubenswrapper[4711]: I0123 08:57:50.746882 4711 generic.go:334] "Generic (PLEG): container finished" podID="9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257" containerID="6e537ba02bce0af96233fa620c23c9e48f7897f131b8bd06b5af81a30d956b9d" exitCode=0 Jan 23 08:57:50 crc kubenswrapper[4711]: I0123 08:57:50.747131 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w75rd" event={"ID":"9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257","Type":"ContainerDied","Data":"6e537ba02bce0af96233fa620c23c9e48f7897f131b8bd06b5af81a30d956b9d"} Jan 23 08:57:50 crc kubenswrapper[4711]: I0123 08:57:50.747943 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w75rd" event={"ID":"9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257","Type":"ContainerStarted","Data":"da2e4621f4e5829447a6587bb1daabc5252880fa3d1f23dc9a9475d79158bb28"} Jan 23 08:57:50 crc kubenswrapper[4711]: I0123 08:57:50.749518 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 23 08:57:52 crc kubenswrapper[4711]: I0123 08:57:52.764054 4711 generic.go:334] "Generic (PLEG): container finished" podID="9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257" containerID="bfd383392a04dcc41415871d0b08bffcd01527ec11c7d9786211a1a21ac50e2f" exitCode=0 Jan 23 08:57:52 crc kubenswrapper[4711]: I0123 08:57:52.764163 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w75rd" 
event={"ID":"9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257","Type":"ContainerDied","Data":"bfd383392a04dcc41415871d0b08bffcd01527ec11c7d9786211a1a21ac50e2f"} Jan 23 08:57:53 crc kubenswrapper[4711]: I0123 08:57:53.775096 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w75rd" event={"ID":"9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257","Type":"ContainerStarted","Data":"d7580dd07f7c75aa7d8ccbeb8dfd792b62c43c7d1c9814c6214154d7444aa25c"} Jan 23 08:57:53 crc kubenswrapper[4711]: I0123 08:57:53.794109 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-w75rd" podStartSLOduration=2.3614318069999998 podStartE2EDuration="4.794088426s" podCreationTimestamp="2026-01-23 08:57:49 +0000 UTC" firstStartedPulling="2026-01-23 08:57:50.749062911 +0000 UTC m=+2256.322019279" lastFinishedPulling="2026-01-23 08:57:53.18171953 +0000 UTC m=+2258.754675898" observedRunningTime="2026-01-23 08:57:53.790879257 +0000 UTC m=+2259.363835635" watchObservedRunningTime="2026-01-23 08:57:53.794088426 +0000 UTC m=+2259.367044804" Jan 23 08:57:58 crc kubenswrapper[4711]: I0123 08:57:58.473872 4711 scope.go:117] "RemoveContainer" containerID="554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52" Jan 23 08:57:58 crc kubenswrapper[4711]: E0123 08:57:58.475028 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:57:59 crc kubenswrapper[4711]: I0123 08:57:59.411426 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-w75rd" Jan 23 08:57:59 crc kubenswrapper[4711]: I0123 08:57:59.412063 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-w75rd" Jan 23 08:57:59 crc kubenswrapper[4711]: I0123 08:57:59.471665 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-w75rd" Jan 23 08:57:59 crc kubenswrapper[4711]: I0123 08:57:59.865819 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-w75rd" Jan 23 08:57:59 crc kubenswrapper[4711]: I0123 08:57:59.916576 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-w75rd"] Jan 23 08:58:01 crc kubenswrapper[4711]: I0123 08:58:01.836183 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-w75rd" podUID="9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257" containerName="registry-server" containerID="cri-o://d7580dd07f7c75aa7d8ccbeb8dfd792b62c43c7d1c9814c6214154d7444aa25c" gracePeriod=2 Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.621379 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-w75rd" Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.765912 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257-catalog-content\") pod \"9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257\" (UID: \"9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257\") " Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.766012 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qttkp\" (UniqueName: \"kubernetes.io/projected/9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257-kube-api-access-qttkp\") pod \"9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257\" (UID: \"9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257\") " Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.766123 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257-utilities\") pod \"9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257\" (UID: \"9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257\") " Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.767265 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257-utilities" (OuterVolumeSpecName: "utilities") pod "9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257" (UID: "9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.774103 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257-kube-api-access-qttkp" (OuterVolumeSpecName: "kube-api-access-qttkp") pod "9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257" (UID: "9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257"). InnerVolumeSpecName "kube-api-access-qttkp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.819057 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257" (UID: "9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.844869 4711 generic.go:334] "Generic (PLEG): container finished" podID="9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257" containerID="d7580dd07f7c75aa7d8ccbeb8dfd792b62c43c7d1c9814c6214154d7444aa25c" exitCode=0 Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.844910 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w75rd" event={"ID":"9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257","Type":"ContainerDied","Data":"d7580dd07f7c75aa7d8ccbeb8dfd792b62c43c7d1c9814c6214154d7444aa25c"} Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.844936 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w75rd" event={"ID":"9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257","Type":"ContainerDied","Data":"da2e4621f4e5829447a6587bb1daabc5252880fa3d1f23dc9a9475d79158bb28"} Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.844953 4711 scope.go:117] "RemoveContainer" containerID="d7580dd07f7c75aa7d8ccbeb8dfd792b62c43c7d1c9814c6214154d7444aa25c" Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.844975 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w75rd" Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.863358 4711 scope.go:117] "RemoveContainer" containerID="bfd383392a04dcc41415871d0b08bffcd01527ec11c7d9786211a1a21ac50e2f" Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.868444 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.868472 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qttkp\" (UniqueName: \"kubernetes.io/projected/9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257-kube-api-access-qttkp\") on node \"crc\" DevicePath \"\"" Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.868485 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257-utilities\") on node \"crc\" DevicePath \"\"" Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.886328 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-w75rd"] Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.895407 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-w75rd"] Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.901906 4711 scope.go:117] "RemoveContainer" containerID="6e537ba02bce0af96233fa620c23c9e48f7897f131b8bd06b5af81a30d956b9d" Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.923882 4711 scope.go:117] "RemoveContainer" containerID="d7580dd07f7c75aa7d8ccbeb8dfd792b62c43c7d1c9814c6214154d7444aa25c" Jan 23 08:58:02 crc kubenswrapper[4711]: E0123 08:58:02.924463 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7580dd07f7c75aa7d8ccbeb8dfd792b62c43c7d1c9814c6214154d7444aa25c\": container with ID starting with d7580dd07f7c75aa7d8ccbeb8dfd792b62c43c7d1c9814c6214154d7444aa25c not found: ID does not exist" containerID="d7580dd07f7c75aa7d8ccbeb8dfd792b62c43c7d1c9814c6214154d7444aa25c" Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.924530 
Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.924559 4711 scope.go:117] "RemoveContainer" containerID="bfd383392a04dcc41415871d0b08bffcd01527ec11c7d9786211a1a21ac50e2f"
Jan 23 08:58:02 crc kubenswrapper[4711]: E0123 08:58:02.925163 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfd383392a04dcc41415871d0b08bffcd01527ec11c7d9786211a1a21ac50e2f\": container with ID starting with bfd383392a04dcc41415871d0b08bffcd01527ec11c7d9786211a1a21ac50e2f not found: ID does not exist" containerID="bfd383392a04dcc41415871d0b08bffcd01527ec11c7d9786211a1a21ac50e2f"
Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.925197 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfd383392a04dcc41415871d0b08bffcd01527ec11c7d9786211a1a21ac50e2f"} err="failed to get container status \"bfd383392a04dcc41415871d0b08bffcd01527ec11c7d9786211a1a21ac50e2f\": rpc error: code = NotFound desc = could not find container \"bfd383392a04dcc41415871d0b08bffcd01527ec11c7d9786211a1a21ac50e2f\": container with ID starting with bfd383392a04dcc41415871d0b08bffcd01527ec11c7d9786211a1a21ac50e2f not found: ID does not exist"
Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.925220 4711 scope.go:117] "RemoveContainer" containerID="6e537ba02bce0af96233fa620c23c9e48f7897f131b8bd06b5af81a30d956b9d"
Jan 23 08:58:02 crc kubenswrapper[4711]: E0123 08:58:02.925484 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e537ba02bce0af96233fa620c23c9e48f7897f131b8bd06b5af81a30d956b9d\": container with ID starting with 6e537ba02bce0af96233fa620c23c9e48f7897f131b8bd06b5af81a30d956b9d not found: ID does not exist" containerID="6e537ba02bce0af96233fa620c23c9e48f7897f131b8bd06b5af81a30d956b9d"
Jan 23 08:58:02 crc kubenswrapper[4711]: I0123 08:58:02.925524 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e537ba02bce0af96233fa620c23c9e48f7897f131b8bd06b5af81a30d956b9d"} err="failed to get container status \"6e537ba02bce0af96233fa620c23c9e48f7897f131b8bd06b5af81a30d956b9d\": rpc error: code = NotFound desc = could not find container \"6e537ba02bce0af96233fa620c23c9e48f7897f131b8bd06b5af81a30d956b9d\": container with ID starting with 6e537ba02bce0af96233fa620c23c9e48f7897f131b8bd06b5af81a30d956b9d not found: ID does not exist"
Jan 23 08:58:03 crc kubenswrapper[4711]: I0123 08:58:03.482541 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257" path="/var/lib/kubelet/pods/9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257/volumes"
Jan 23 08:58:09 crc kubenswrapper[4711]: I0123 08:58:09.474041 4711 scope.go:117] "RemoveContainer" containerID="554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52"
Jan 23 08:58:09 crc kubenswrapper[4711]: E0123 08:58:09.474872 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e"
err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:58:22 crc kubenswrapper[4711]: I0123 08:58:22.270925 4711 scope.go:117] "RemoveContainer" containerID="4edf8832aec27e995aa3ea0d4eabd1e49e7a51ca65c5b85bdef7502040b4e3a8" Jan 23 08:58:23 crc kubenswrapper[4711]: I0123 08:58:23.473670 4711 scope.go:117] "RemoveContainer" containerID="554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52" Jan 23 08:58:23 crc kubenswrapper[4711]: E0123 08:58:23.474096 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:58:35 crc kubenswrapper[4711]: I0123 08:58:35.478883 4711 scope.go:117] "RemoveContainer" containerID="554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52" Jan 23 08:58:35 crc kubenswrapper[4711]: E0123 08:58:35.479707 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:58:50 crc kubenswrapper[4711]: I0123 08:58:50.474101 4711 scope.go:117] "RemoveContainer" containerID="554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52" Jan 23 08:58:50 crc kubenswrapper[4711]: E0123 08:58:50.474832 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:59:03 crc kubenswrapper[4711]: I0123 08:59:03.474067 4711 scope.go:117] "RemoveContainer" containerID="554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52" Jan 23 08:59:03 crc kubenswrapper[4711]: E0123 08:59:03.475841 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:59:16 crc kubenswrapper[4711]: I0123 08:59:16.507629 4711 scope.go:117] "RemoveContainer" containerID="554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52" Jan 23 08:59:16 crc kubenswrapper[4711]: E0123 08:59:16.509842 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=nova-manage 
Jan 23 08:59:19 crc kubenswrapper[4711]: I0123 08:59:19.754490 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-784948c4bd-c7q8r_b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7/keystone-api/0.log"
Jan 23 08:59:22 crc kubenswrapper[4711]: I0123 08:59:22.347313 4711 scope.go:117] "RemoveContainer" containerID="40e825ea9a6e96598613640ec00e4d72940c4231e278b40662a68b219879df50"
Jan 23 08:59:22 crc kubenswrapper[4711]: I0123 08:59:22.374840 4711 scope.go:117] "RemoveContainer" containerID="5403d5d5a2e294ac2483a6bf2592295e3dd72bde46af8f490c41be40e8ac299a"
Jan 23 08:59:22 crc kubenswrapper[4711]: I0123 08:59:22.408228 4711 scope.go:117] "RemoveContainer" containerID="86ab0618d146f5612d3cf08cb93f536e357bf5978c2133b564e58af5cdc2f575"
Jan 23 08:59:22 crc kubenswrapper[4711]: I0123 08:59:22.986736 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_memcached-0_e899df0c-3fb3-4d7b-b376-0a907dbc82a0/memcached/0.log"
Jan 23 08:59:23 crc kubenswrapper[4711]: I0123 08:59:23.532596 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-api-30c6-account-create-update-jsrs5_efaa3835-9199-4083-b21a-fe0513b1f665/mariadb-account-create-update/0.log"
Jan 23 08:59:24 crc kubenswrapper[4711]: I0123 08:59:24.061628 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-api-db-create-rtqsh_b032059c-7845-4107-b676-1e1d66d18d16/mariadb-database-create/0.log"
Jan 23 08:59:24 crc kubenswrapper[4711]: I0123 08:59:24.595546 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-cell0-6077-account-create-update-dljdv_e5263b1b-5e34-49db-a73a-4e179736aae9/mariadb-account-create-update/0.log"
Jan 23 08:59:25 crc kubenswrapper[4711]: I0123 08:59:25.119114 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-cell0-db-create-lh44q_8aaefd64-d6df-4ecb-bdb2-ed135a281f26/mariadb-database-create/0.log"
Jan 23 08:59:25 crc kubenswrapper[4711]: I0123 08:59:25.571177 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-cell1-044b-account-create-update-dvckg_af839ca6-1c3d-41e0-807c-5154f96201c0/mariadb-account-create-update/0.log"
Jan 23 08:59:25 crc kubenswrapper[4711]: I0123 08:59:25.993693 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 23 08:59:25 crc kubenswrapper[4711]: I0123 08:59:25.994191 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 23 08:59:26 crc kubenswrapper[4711]: I0123 08:59:26.026796 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-cell1-db-create-4q85d_979e054b-e3c8-42e2-926f-49d0decb456c/mariadb-database-create/0.log"
Jan 23 08:59:26 crc kubenswrapper[4711]: I0123 08:59:26.526162 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-api-0_f70146fe-2308-422b-9efa-42b334f7675f/nova-kuttl-api-log/0.log"
path="/var/log/pods/nova-kuttl-default_nova-kuttl-api-0_f70146fe-2308-422b-9efa-42b334f7675f/nova-kuttl-api-log/0.log" Jan 23 08:59:26 crc kubenswrapper[4711]: I0123 08:59:26.931222 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-cell0-cell-mapping-4nfnc_4e42940a-1607-4217-b21e-789504a59b2d/nova-manage/0.log" Jan 23 08:59:27 crc kubenswrapper[4711]: I0123 08:59:27.433633 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-cell0-conductor-0_944ce012-35b1-4e7a-a6ac-8c89d9b8cd1a/nova-kuttl-cell0-conductor-conductor/0.log" Jan 23 08:59:27 crc kubenswrapper[4711]: I0123 08:59:27.859853 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-cell0-conductor-db-sync-9zdrv_847224a6-d90c-4be4-a4ea-c60e7c6d9986/nova-kuttl-cell0-conductor-db-sync/0.log" Jan 23 08:59:28 crc kubenswrapper[4711]: I0123 08:59:28.287964 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-cell1-cell-delete-h5mxz_ff4efd5e-ae35-4d90-b767-7bfd505b441e/nova-manage/5.log" Jan 23 08:59:28 crc kubenswrapper[4711]: I0123 08:59:28.746911 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-cell1-cell-mapping-2mtcc_5518fb5f-7762-4beb-b66e-c5d463c3a672/nova-manage/0.log" Jan 23 08:59:29 crc kubenswrapper[4711]: I0123 08:59:29.258089 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-cell1-conductor-0_704a67ad-6f29-43f2-b01f-be325aa8cb91/nova-kuttl-cell1-conductor-conductor/0.log" Jan 23 08:59:29 crc kubenswrapper[4711]: I0123 08:59:29.473615 4711 scope.go:117] "RemoveContainer" containerID="554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52" Jan 23 08:59:29 crc kubenswrapper[4711]: E0123 08:59:29.473977 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:59:29 crc kubenswrapper[4711]: I0123 08:59:29.689717 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-cell1-conductor-db-sync-82pn7_ee1bec46-a27c-4fac-b0c7-71eb3671700e/nova-kuttl-cell1-conductor-db-sync/0.log" Jan 23 08:59:30 crc kubenswrapper[4711]: I0123 08:59:30.153074 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-cell1-novncproxy-0_4b9c7342-6111-4e46-8bc7-6edcddd570af/nova-kuttl-cell1-novncproxy-novncproxy/0.log" Jan 23 08:59:30 crc kubenswrapper[4711]: I0123 08:59:30.693391 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-metadata-0_667ff795-4e58-403c-9f54-bd5c2ace5456/nova-kuttl-metadata-log/0.log" Jan 23 08:59:31 crc kubenswrapper[4711]: I0123 08:59:31.159311 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-scheduler-0_44ba32b3-a02b-4ab5-a00c-90fb25eea139/nova-kuttl-scheduler-scheduler/0.log" Jan 23 08:59:31 crc kubenswrapper[4711]: I0123 08:59:31.603219 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-cell1-galera-0_13ccba34-03d8-4429-bace-b75cb5d12763/galera/0.log" Jan 23 08:59:32 crc kubenswrapper[4711]: I0123 08:59:32.084240 4711 log.go:25] 
"Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-galera-0_a21f5317-eee2-4f13-9df5-40c48bce5aaf/galera/0.log" Jan 23 08:59:32 crc kubenswrapper[4711]: I0123 08:59:32.589649 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstackclient_3ac7d3e6-9992-4a83-bbff-8c99ef784b20/openstackclient/0.log" Jan 23 08:59:33 crc kubenswrapper[4711]: I0123 08:59:33.068185 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_placement-559c845968-gb6qv_eb857478-48a3-4ed9-8a19-47386937c4d7/placement-log/0.log" Jan 23 08:59:33 crc kubenswrapper[4711]: I0123 08:59:33.520044 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-broadcaster-server-0_b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf/rabbitmq/0.log" Jan 23 08:59:34 crc kubenswrapper[4711]: I0123 08:59:34.017279 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-cell1-server-0_00e53f46-c48c-4f2c-83aa-088781b82d46/rabbitmq/0.log" Jan 23 08:59:34 crc kubenswrapper[4711]: I0123 08:59:34.518476 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-server-0_f970c1db-48d5-4b49-afc1-eee7e1289da9/rabbitmq/0.log" Jan 23 08:59:42 crc kubenswrapper[4711]: I0123 08:59:42.473597 4711 scope.go:117] "RemoveContainer" containerID="554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52" Jan 23 08:59:42 crc kubenswrapper[4711]: E0123 08:59:42.474498 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 08:59:55 crc kubenswrapper[4711]: I0123 08:59:55.994014 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 08:59:55 crc kubenswrapper[4711]: I0123 08:59:55.994737 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 08:59:57 crc kubenswrapper[4711]: I0123 08:59:57.474215 4711 scope.go:117] "RemoveContainer" containerID="554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52" Jan 23 08:59:57 crc kubenswrapper[4711]: E0123 08:59:57.475247 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 09:00:00 crc kubenswrapper[4711]: I0123 09:00:00.155298 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29485980-xhzws"] Jan 23 09:00:00 crc kubenswrapper[4711]: E0123 09:00:00.155986 4711 
Jan 23 09:00:00 crc kubenswrapper[4711]: I0123 09:00:00.156000 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257" containerName="registry-server"
Jan 23 09:00:00 crc kubenswrapper[4711]: E0123 09:00:00.156013 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257" containerName="extract-content"
Jan 23 09:00:00 crc kubenswrapper[4711]: I0123 09:00:00.156019 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257" containerName="extract-content"
Jan 23 09:00:00 crc kubenswrapper[4711]: E0123 09:00:00.156027 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257" containerName="extract-utilities"
Jan 23 09:00:00 crc kubenswrapper[4711]: I0123 09:00:00.156035 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257" containerName="extract-utilities"
Jan 23 09:00:00 crc kubenswrapper[4711]: I0123 09:00:00.156196 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c267fc4-3e79-4ff3-a9d3-0fa6b8d6a257" containerName="registry-server"
Jan 23 09:00:00 crc kubenswrapper[4711]: I0123 09:00:00.156822 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29485980-xhzws"
Jan 23 09:00:00 crc kubenswrapper[4711]: I0123 09:00:00.159459 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 23 09:00:00 crc kubenswrapper[4711]: I0123 09:00:00.159463 4711 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 23 09:00:00 crc kubenswrapper[4711]: I0123 09:00:00.169990 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29485980-xhzws"]
Jan 23 09:00:00 crc kubenswrapper[4711]: I0123 09:00:00.310976 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/edcec5fe-f4b0-4c3c-8bca-c7f8ca760874-config-volume\") pod \"collect-profiles-29485980-xhzws\" (UID: \"edcec5fe-f4b0-4c3c-8bca-c7f8ca760874\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485980-xhzws"
Jan 23 09:00:00 crc kubenswrapper[4711]: I0123 09:00:00.311012 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/edcec5fe-f4b0-4c3c-8bca-c7f8ca760874-secret-volume\") pod \"collect-profiles-29485980-xhzws\" (UID: \"edcec5fe-f4b0-4c3c-8bca-c7f8ca760874\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485980-xhzws"
Jan 23 09:00:00 crc kubenswrapper[4711]: I0123 09:00:00.311088 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rbzs\" (UniqueName: \"kubernetes.io/projected/edcec5fe-f4b0-4c3c-8bca-c7f8ca760874-kube-api-access-2rbzs\") pod \"collect-profiles-29485980-xhzws\" (UID: \"edcec5fe-f4b0-4c3c-8bca-c7f8ca760874\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485980-xhzws"
Jan 23 09:00:00 crc kubenswrapper[4711]: I0123 09:00:00.412718 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/edcec5fe-f4b0-4c3c-8bca-c7f8ca760874-config-volume\") pod \"collect-profiles-29485980-xhzws\" (UID: \"edcec5fe-f4b0-4c3c-8bca-c7f8ca760874\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485980-xhzws"
Jan 23 09:00:00 crc kubenswrapper[4711]: I0123 09:00:00.412769 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/edcec5fe-f4b0-4c3c-8bca-c7f8ca760874-secret-volume\") pod \"collect-profiles-29485980-xhzws\" (UID: \"edcec5fe-f4b0-4c3c-8bca-c7f8ca760874\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485980-xhzws"
Jan 23 09:00:00 crc kubenswrapper[4711]: I0123 09:00:00.412848 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rbzs\" (UniqueName: \"kubernetes.io/projected/edcec5fe-f4b0-4c3c-8bca-c7f8ca760874-kube-api-access-2rbzs\") pod \"collect-profiles-29485980-xhzws\" (UID: \"edcec5fe-f4b0-4c3c-8bca-c7f8ca760874\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485980-xhzws"
Jan 23 09:00:00 crc kubenswrapper[4711]: I0123 09:00:00.413709 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/edcec5fe-f4b0-4c3c-8bca-c7f8ca760874-config-volume\") pod \"collect-profiles-29485980-xhzws\" (UID: \"edcec5fe-f4b0-4c3c-8bca-c7f8ca760874\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485980-xhzws"
Jan 23 09:00:00 crc kubenswrapper[4711]: I0123 09:00:00.437566 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/edcec5fe-f4b0-4c3c-8bca-c7f8ca760874-secret-volume\") pod \"collect-profiles-29485980-xhzws\" (UID: \"edcec5fe-f4b0-4c3c-8bca-c7f8ca760874\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485980-xhzws"
Jan 23 09:00:00 crc kubenswrapper[4711]: I0123 09:00:00.439860 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rbzs\" (UniqueName: \"kubernetes.io/projected/edcec5fe-f4b0-4c3c-8bca-c7f8ca760874-kube-api-access-2rbzs\") pod \"collect-profiles-29485980-xhzws\" (UID: \"edcec5fe-f4b0-4c3c-8bca-c7f8ca760874\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29485980-xhzws"
Jan 23 09:00:00 crc kubenswrapper[4711]: I0123 09:00:00.526283 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29485980-xhzws"
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29485980-xhzws" Jan 23 09:00:00 crc kubenswrapper[4711]: I0123 09:00:00.979338 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29485980-xhzws"] Jan 23 09:00:01 crc kubenswrapper[4711]: I0123 09:00:01.795715 4711 generic.go:334] "Generic (PLEG): container finished" podID="edcec5fe-f4b0-4c3c-8bca-c7f8ca760874" containerID="5c985789b636bae5613a565bfea5ea76925bbd7719953e89ac539568f688f582" exitCode=0 Jan 23 09:00:01 crc kubenswrapper[4711]: I0123 09:00:01.795924 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29485980-xhzws" event={"ID":"edcec5fe-f4b0-4c3c-8bca-c7f8ca760874","Type":"ContainerDied","Data":"5c985789b636bae5613a565bfea5ea76925bbd7719953e89ac539568f688f582"} Jan 23 09:00:01 crc kubenswrapper[4711]: I0123 09:00:01.795994 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29485980-xhzws" event={"ID":"edcec5fe-f4b0-4c3c-8bca-c7f8ca760874","Type":"ContainerStarted","Data":"b77b8a82c3fd72d76b122deddf3b5c5f04da01680b8f560fa471a7c2f66e5be7"} Jan 23 09:00:03 crc kubenswrapper[4711]: I0123 09:00:03.092457 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29485980-xhzws" Jan 23 09:00:03 crc kubenswrapper[4711]: I0123 09:00:03.259685 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/edcec5fe-f4b0-4c3c-8bca-c7f8ca760874-secret-volume\") pod \"edcec5fe-f4b0-4c3c-8bca-c7f8ca760874\" (UID: \"edcec5fe-f4b0-4c3c-8bca-c7f8ca760874\") " Jan 23 09:00:03 crc kubenswrapper[4711]: I0123 09:00:03.260052 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rbzs\" (UniqueName: \"kubernetes.io/projected/edcec5fe-f4b0-4c3c-8bca-c7f8ca760874-kube-api-access-2rbzs\") pod \"edcec5fe-f4b0-4c3c-8bca-c7f8ca760874\" (UID: \"edcec5fe-f4b0-4c3c-8bca-c7f8ca760874\") " Jan 23 09:00:03 crc kubenswrapper[4711]: I0123 09:00:03.260244 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/edcec5fe-f4b0-4c3c-8bca-c7f8ca760874-config-volume\") pod \"edcec5fe-f4b0-4c3c-8bca-c7f8ca760874\" (UID: \"edcec5fe-f4b0-4c3c-8bca-c7f8ca760874\") " Jan 23 09:00:03 crc kubenswrapper[4711]: I0123 09:00:03.260892 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/edcec5fe-f4b0-4c3c-8bca-c7f8ca760874-config-volume" (OuterVolumeSpecName: "config-volume") pod "edcec5fe-f4b0-4c3c-8bca-c7f8ca760874" (UID: "edcec5fe-f4b0-4c3c-8bca-c7f8ca760874"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 23 09:00:03 crc kubenswrapper[4711]: I0123 09:00:03.265334 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edcec5fe-f4b0-4c3c-8bca-c7f8ca760874-kube-api-access-2rbzs" (OuterVolumeSpecName: "kube-api-access-2rbzs") pod "edcec5fe-f4b0-4c3c-8bca-c7f8ca760874" (UID: "edcec5fe-f4b0-4c3c-8bca-c7f8ca760874"). InnerVolumeSpecName "kube-api-access-2rbzs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 09:00:03 crc kubenswrapper[4711]: I0123 09:00:03.265649 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/edcec5fe-f4b0-4c3c-8bca-c7f8ca760874-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "edcec5fe-f4b0-4c3c-8bca-c7f8ca760874" (UID: "edcec5fe-f4b0-4c3c-8bca-c7f8ca760874"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 09:00:03 crc kubenswrapper[4711]: I0123 09:00:03.361902 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rbzs\" (UniqueName: \"kubernetes.io/projected/edcec5fe-f4b0-4c3c-8bca-c7f8ca760874-kube-api-access-2rbzs\") on node \"crc\" DevicePath \"\"" Jan 23 09:00:03 crc kubenswrapper[4711]: I0123 09:00:03.361938 4711 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/edcec5fe-f4b0-4c3c-8bca-c7f8ca760874-config-volume\") on node \"crc\" DevicePath \"\"" Jan 23 09:00:03 crc kubenswrapper[4711]: I0123 09:00:03.361947 4711 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/edcec5fe-f4b0-4c3c-8bca-c7f8ca760874-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 23 09:00:03 crc kubenswrapper[4711]: I0123 09:00:03.813948 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29485980-xhzws" event={"ID":"edcec5fe-f4b0-4c3c-8bca-c7f8ca760874","Type":"ContainerDied","Data":"b77b8a82c3fd72d76b122deddf3b5c5f04da01680b8f560fa471a7c2f66e5be7"} Jan 23 09:00:03 crc kubenswrapper[4711]: I0123 09:00:03.814496 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b77b8a82c3fd72d76b122deddf3b5c5f04da01680b8f560fa471a7c2f66e5be7" Jan 23 09:00:03 crc kubenswrapper[4711]: I0123 09:00:03.814002 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29485980-xhzws" Jan 23 09:00:04 crc kubenswrapper[4711]: I0123 09:00:04.170063 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh"] Jan 23 09:00:04 crc kubenswrapper[4711]: I0123 09:00:04.178752 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29485935-856kh"] Jan 23 09:00:05 crc kubenswrapper[4711]: I0123 09:00:05.484541 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1ab258d-ba2f-434d-8a15-2a08f24d03cb" path="/var/lib/kubelet/pods/e1ab258d-ba2f-434d-8a15-2a08f24d03cb/volumes" Jan 23 09:00:07 crc kubenswrapper[4711]: I0123 09:00:07.033408 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-59dd8b7cbf-gw4s9_a7d6e419-04ce-4f5c-93a9-34d14a8c531a/manager/0.log" Jan 23 09:00:07 crc kubenswrapper[4711]: I0123 09:00:07.462776 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-69cf5d4557-kjjf2_0c4bb18f-fc6b-49ea-a9c3-971c666a935b/manager/0.log" Jan 23 09:00:07 crc kubenswrapper[4711]: I0123 09:00:07.924881 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-b45d7bf98-4pw4h_c6c6b995-fa92-4cf2-87a1-361881e8c284/manager/0.log" Jan 23 09:00:08 crc kubenswrapper[4711]: I0123 09:00:08.333591 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl_c1bb2b11-1e9e-4000-996b-8097bcc3a448/extract/0.log" Jan 23 09:00:08 crc kubenswrapper[4711]: I0123 09:00:08.742390 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8_ee296785-739b-4103-84c5-ab2fe24f3a7c/extract/0.log" Jan 23 09:00:09 crc kubenswrapper[4711]: I0123 09:00:09.184849 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-78fdd796fd-62nhb_e609d803-23cf-4d04-8587-bdd492c4c4bd/manager/0.log" Jan 23 09:00:09 crc kubenswrapper[4711]: I0123 09:00:09.474244 4711 scope.go:117] "RemoveContainer" containerID="554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52" Jan 23 09:00:09 crc kubenswrapper[4711]: E0123 09:00:09.474622 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 09:00:09 crc kubenswrapper[4711]: I0123 09:00:09.637342 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-594c8c9d5d-sk67x_7c54a5ef-3d58-4010-875b-8b6022692c7e/manager/0.log" Jan 23 09:00:10 crc kubenswrapper[4711]: I0123 09:00:10.085730 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-77d5c5b54f-v7tnr_cb3cde58-59aa-41dc-a4f1-8fadd07dd1ed/manager/0.log" Jan 23 09:00:10 crc kubenswrapper[4711]: I0123 09:00:10.634212 4711 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_infra-operator-controller-manager-54ccf4f85d-vs5n6_467b6d38-a02c-44f9-81bf-3bda90dc4efd/manager/0.log" Jan 23 09:00:11 crc kubenswrapper[4711]: I0123 09:00:11.048234 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-69d6c9f5b8-ghlgc_5ad48aae-ab84-4c93-9d0e-cb4fd24884dd/manager/0.log" Jan 23 09:00:11 crc kubenswrapper[4711]: I0123 09:00:11.537368 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-b8b6d4659-9fsww_f57c2bcd-cd26-420c-a3f9-64b5d4d1a916/manager/0.log" Jan 23 09:00:11 crc kubenswrapper[4711]: I0123 09:00:11.997388 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-78c6999f6f-8ptr2_effbd1c1-9a1a-4a5c-9955-6a1005746383/manager/0.log" Jan 23 09:00:12 crc kubenswrapper[4711]: I0123 09:00:12.456549 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-c87fff755-bjrrb_bfc822a0-472c-4e41-99c9-35605ebea5c6/manager/0.log" Jan 23 09:00:12 crc kubenswrapper[4711]: I0123 09:00:12.834358 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5d8f59fb49-42kx2_b764e900-9f5f-49e6-b6a8-8ad55007cc54/manager/0.log" Jan 23 09:00:13 crc kubenswrapper[4711]: I0123 09:00:13.707315 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-6b684b99ff-stnvz_c9735021-bb7c-43ac-b95e-a4fd2d26c84a/manager/0.log" Jan 23 09:00:14 crc kubenswrapper[4711]: I0123 09:00:14.118758 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-index-tm9mb_0bbe37a8-f874-4b6b-8f5d-35c1f4a463cc/registry-server/0.log" Jan 23 09:00:14 crc kubenswrapper[4711]: I0123 09:00:14.521549 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7bd9774b6-cmn6r_f1b9f385-b045-407d-a56c-87750c1c5972/manager/0.log" Jan 23 09:00:14 crc kubenswrapper[4711]: I0123 09:00:14.925702 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6b68b8b854q774f_e275a9a3-3a29-498a-bea9-b545730a0301/manager/0.log" Jan 23 09:00:15 crc kubenswrapper[4711]: I0123 09:00:15.634700 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6998f5c585-t278t_72674af2-3b9e-47e6-8417-bee428fe826a/manager/0.log" Jan 23 09:00:16 crc kubenswrapper[4711]: I0123 09:00:16.094472 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-2fdmb_eb73ab49-65f1-446a-960e-035803896d9a/registry-server/0.log" Jan 23 09:00:16 crc kubenswrapper[4711]: I0123 09:00:16.474277 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-55db956ddc-plrlz_0b81979e-a44a-40d2-8eff-958e528d95a1/manager/0.log" Jan 23 09:00:16 crc kubenswrapper[4711]: I0123 09:00:16.901205 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5d646b7d76-ctqxt_9eb50a74-06d4-4b43-a0b9-245354b3cde7/manager/0.log" Jan 23 09:00:17 crc kubenswrapper[4711]: I0123 09:00:17.337011 4711 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-hwttv_5726b969-f5cb-4e58-9e8f-92c001f4a7be/operator/0.log" Jan 23 09:00:17 crc kubenswrapper[4711]: I0123 09:00:17.756445 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-547cbdb99f-4lhtg_366f8b54-0e7e-4d75-9c62-d174624512e4/manager/0.log" Jan 23 09:00:18 crc kubenswrapper[4711]: I0123 09:00:18.193338 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-85cd9769bb-qnzz6_793a9ab1-c243-4a56-9463-3be8147bff44/manager/0.log" Jan 23 09:00:18 crc kubenswrapper[4711]: I0123 09:00:18.682902 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-69797bbcbd-6wgwv_a1ec006d-63fd-4fac-b5f5-df222bab8638/manager/0.log" Jan 23 09:00:19 crc kubenswrapper[4711]: I0123 09:00:19.193341 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-5ffb9c6597-lvcpb_458cb03b-3e35-4219-8009-08829d99da25/manager/0.log" Jan 23 09:00:22 crc kubenswrapper[4711]: I0123 09:00:22.494750 4711 scope.go:117] "RemoveContainer" containerID="806b801deab8860adaf602e8a412d238e40516c62c604d434f2ad5d861286a96" Jan 23 09:00:24 crc kubenswrapper[4711]: I0123 09:00:24.099258 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-784948c4bd-c7q8r_b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7/keystone-api/0.log" Jan 23 09:00:24 crc kubenswrapper[4711]: I0123 09:00:24.473223 4711 scope.go:117] "RemoveContainer" containerID="554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52" Jan 23 09:00:24 crc kubenswrapper[4711]: E0123 09:00:24.473484 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-manage\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=nova-manage pod=nova-kuttl-cell1-cell-delete-h5mxz_nova-kuttl-default(ff4efd5e-ae35-4d90-b767-7bfd505b441e)\"" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" Jan 23 09:00:25 crc kubenswrapper[4711]: I0123 09:00:25.993212 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 09:00:25 crc kubenswrapper[4711]: I0123 09:00:25.993613 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 09:00:25 crc kubenswrapper[4711]: I0123 09:00:25.993691 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 09:00:25 crc kubenswrapper[4711]: I0123 09:00:25.994673 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7"} pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" containerMessage="Container machine-config-daemon failed 
liveness probe, will be restarted" Jan 23 09:00:25 crc kubenswrapper[4711]: I0123 09:00:25.994731 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" containerID="cri-o://19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" gracePeriod=600 Jan 23 09:00:26 crc kubenswrapper[4711]: E0123 09:00:26.116013 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 09:00:26 crc kubenswrapper[4711]: I0123 09:00:26.998471 4711 generic.go:334] "Generic (PLEG): container finished" podID="3846d4e0-cfda-4e0b-8747-85267de12736" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" exitCode=0 Jan 23 09:00:26 crc kubenswrapper[4711]: I0123 09:00:26.998543 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerDied","Data":"19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7"} Jan 23 09:00:26 crc kubenswrapper[4711]: I0123 09:00:26.999277 4711 scope.go:117] "RemoveContainer" containerID="6fc3e292f98695a914f968c4697ab5b4100d4aad931a282b95317fbc924708bc" Jan 23 09:00:27 crc kubenswrapper[4711]: I0123 09:00:27.000878 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:00:27 crc kubenswrapper[4711]: E0123 09:00:27.001281 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 09:00:27 crc kubenswrapper[4711]: I0123 09:00:27.229739 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_memcached-0_e899df0c-3fb3-4d7b-b376-0a907dbc82a0/memcached/0.log" Jan 23 09:00:27 crc kubenswrapper[4711]: I0123 09:00:27.778704 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-api-30c6-account-create-update-jsrs5_efaa3835-9199-4083-b21a-fe0513b1f665/mariadb-account-create-update/0.log" Jan 23 09:00:28 crc kubenswrapper[4711]: I0123 09:00:28.313430 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-api-db-create-rtqsh_b032059c-7845-4107-b676-1e1d66d18d16/mariadb-database-create/0.log" Jan 23 09:00:28 crc kubenswrapper[4711]: I0123 09:00:28.875564 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-cell0-6077-account-create-update-dljdv_e5263b1b-5e34-49db-a73a-4e179736aae9/mariadb-account-create-update/0.log" Jan 23 09:00:29 crc kubenswrapper[4711]: I0123 09:00:29.431689 4711 log.go:25] "Finished parsing log file" 
path="/var/log/pods/nova-kuttl-default_nova-cell0-db-create-lh44q_8aaefd64-d6df-4ecb-bdb2-ed135a281f26/mariadb-database-create/0.log" Jan 23 09:00:29 crc kubenswrapper[4711]: I0123 09:00:29.918199 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-cell1-044b-account-create-update-dvckg_af839ca6-1c3d-41e0-807c-5154f96201c0/mariadb-account-create-update/0.log" Jan 23 09:00:30 crc kubenswrapper[4711]: I0123 09:00:30.376275 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-cell1-db-create-4q85d_979e054b-e3c8-42e2-926f-49d0decb456c/mariadb-database-create/0.log" Jan 23 09:00:30 crc kubenswrapper[4711]: I0123 09:00:30.929798 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-api-0_f70146fe-2308-422b-9efa-42b334f7675f/nova-kuttl-api-log/0.log" Jan 23 09:00:31 crc kubenswrapper[4711]: I0123 09:00:31.415727 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-cell0-cell-mapping-4nfnc_4e42940a-1607-4217-b21e-789504a59b2d/nova-manage/0.log" Jan 23 09:00:31 crc kubenswrapper[4711]: I0123 09:00:31.920071 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-cell0-conductor-0_944ce012-35b1-4e7a-a6ac-8c89d9b8cd1a/nova-kuttl-cell0-conductor-conductor/0.log" Jan 23 09:00:32 crc kubenswrapper[4711]: I0123 09:00:32.657978 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-cell0-conductor-db-sync-9zdrv_847224a6-d90c-4be4-a4ea-c60e7c6d9986/nova-kuttl-cell0-conductor-db-sync/0.log" Jan 23 09:00:33 crc kubenswrapper[4711]: I0123 09:00:33.176484 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-cell1-cell-delete-h5mxz_ff4efd5e-ae35-4d90-b767-7bfd505b441e/nova-manage/5.log" Jan 23 09:00:33 crc kubenswrapper[4711]: I0123 09:00:33.668943 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-cell1-cell-mapping-2mtcc_5518fb5f-7762-4beb-b66e-c5d463c3a672/nova-manage/0.log" Jan 23 09:00:34 crc kubenswrapper[4711]: I0123 09:00:34.199994 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-cell1-conductor-0_704a67ad-6f29-43f2-b01f-be325aa8cb91/nova-kuttl-cell1-conductor-conductor/0.log" Jan 23 09:00:34 crc kubenswrapper[4711]: I0123 09:00:34.646895 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-cell1-conductor-db-sync-82pn7_ee1bec46-a27c-4fac-b0c7-71eb3671700e/nova-kuttl-cell1-conductor-db-sync/0.log" Jan 23 09:00:35 crc kubenswrapper[4711]: I0123 09:00:35.109473 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-cell1-novncproxy-0_4b9c7342-6111-4e46-8bc7-6edcddd570af/nova-kuttl-cell1-novncproxy-novncproxy/0.log" Jan 23 09:00:35 crc kubenswrapper[4711]: I0123 09:00:35.616244 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-metadata-0_667ff795-4e58-403c-9f54-bd5c2ace5456/nova-kuttl-metadata-log/0.log" Jan 23 09:00:36 crc kubenswrapper[4711]: I0123 09:00:36.125391 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-scheduler-0_44ba32b3-a02b-4ab5-a00c-90fb25eea139/nova-kuttl-scheduler-scheduler/0.log" Jan 23 09:00:36 crc kubenswrapper[4711]: I0123 09:00:36.474041 4711 scope.go:117] "RemoveContainer" containerID="554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52" Jan 23 
09:00:36 crc kubenswrapper[4711]: I0123 09:00:36.605139 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-cell1-galera-0_13ccba34-03d8-4429-bace-b75cb5d12763/galera/0.log" Jan 23 09:00:37 crc kubenswrapper[4711]: I0123 09:00:37.074767 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" event={"ID":"ff4efd5e-ae35-4d90-b767-7bfd505b441e","Type":"ContainerStarted","Data":"7893368a901c1c7bbd6f0bca507d95319030d4c068c6b76acf1e471cb7975a0b"} Jan 23 09:00:37 crc kubenswrapper[4711]: I0123 09:00:37.156467 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-galera-0_a21f5317-eee2-4f13-9df5-40c48bce5aaf/galera/0.log" Jan 23 09:00:37 crc kubenswrapper[4711]: I0123 09:00:37.545894 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstackclient_3ac7d3e6-9992-4a83-bbff-8c99ef784b20/openstackclient/0.log" Jan 23 09:00:37 crc kubenswrapper[4711]: I0123 09:00:37.966848 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_placement-559c845968-gb6qv_eb857478-48a3-4ed9-8a19-47386937c4d7/placement-log/0.log" Jan 23 09:00:38 crc kubenswrapper[4711]: I0123 09:00:38.108448 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz"] Jan 23 09:00:38 crc kubenswrapper[4711]: I0123 09:00:38.108700 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" containerID="cri-o://7893368a901c1c7bbd6f0bca507d95319030d4c068c6b76acf1e471cb7975a0b" gracePeriod=30 Jan 23 09:00:38 crc kubenswrapper[4711]: I0123 09:00:38.446452 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-broadcaster-server-0_b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf/rabbitmq/0.log" Jan 23 09:00:38 crc kubenswrapper[4711]: I0123 09:00:38.914699 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-cell1-server-0_00e53f46-c48c-4f2c-83aa-088781b82d46/rabbitmq/0.log" Jan 23 09:00:39 crc kubenswrapper[4711]: I0123 09:00:39.376765 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-server-0_f970c1db-48d5-4b49-afc1-eee7e1289da9/rabbitmq/0.log" Jan 23 09:00:41 crc kubenswrapper[4711]: I0123 09:00:41.892550 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" Jan 23 09:00:41 crc kubenswrapper[4711]: I0123 09:00:41.978066 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mt5k9\" (UniqueName: \"kubernetes.io/projected/ff4efd5e-ae35-4d90-b767-7bfd505b441e-kube-api-access-mt5k9\") pod \"ff4efd5e-ae35-4d90-b767-7bfd505b441e\" (UID: \"ff4efd5e-ae35-4d90-b767-7bfd505b441e\") " Jan 23 09:00:41 crc kubenswrapper[4711]: I0123 09:00:41.978128 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff4efd5e-ae35-4d90-b767-7bfd505b441e-scripts\") pod \"ff4efd5e-ae35-4d90-b767-7bfd505b441e\" (UID: \"ff4efd5e-ae35-4d90-b767-7bfd505b441e\") " Jan 23 09:00:41 crc kubenswrapper[4711]: I0123 09:00:41.978203 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff4efd5e-ae35-4d90-b767-7bfd505b441e-config-data\") pod \"ff4efd5e-ae35-4d90-b767-7bfd505b441e\" (UID: \"ff4efd5e-ae35-4d90-b767-7bfd505b441e\") " Jan 23 09:00:41 crc kubenswrapper[4711]: I0123 09:00:41.995841 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff4efd5e-ae35-4d90-b767-7bfd505b441e-kube-api-access-mt5k9" (OuterVolumeSpecName: "kube-api-access-mt5k9") pod "ff4efd5e-ae35-4d90-b767-7bfd505b441e" (UID: "ff4efd5e-ae35-4d90-b767-7bfd505b441e"). InnerVolumeSpecName "kube-api-access-mt5k9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 09:00:41 crc kubenswrapper[4711]: I0123 09:00:41.997947 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff4efd5e-ae35-4d90-b767-7bfd505b441e-scripts" (OuterVolumeSpecName: "scripts") pod "ff4efd5e-ae35-4d90-b767-7bfd505b441e" (UID: "ff4efd5e-ae35-4d90-b767-7bfd505b441e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 09:00:42 crc kubenswrapper[4711]: I0123 09:00:42.032048 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff4efd5e-ae35-4d90-b767-7bfd505b441e-config-data" (OuterVolumeSpecName: "config-data") pod "ff4efd5e-ae35-4d90-b767-7bfd505b441e" (UID: "ff4efd5e-ae35-4d90-b767-7bfd505b441e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 09:00:42 crc kubenswrapper[4711]: I0123 09:00:42.079819 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mt5k9\" (UniqueName: \"kubernetes.io/projected/ff4efd5e-ae35-4d90-b767-7bfd505b441e-kube-api-access-mt5k9\") on node \"crc\" DevicePath \"\"" Jan 23 09:00:42 crc kubenswrapper[4711]: I0123 09:00:42.079850 4711 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff4efd5e-ae35-4d90-b767-7bfd505b441e-scripts\") on node \"crc\" DevicePath \"\"" Jan 23 09:00:42 crc kubenswrapper[4711]: I0123 09:00:42.079860 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff4efd5e-ae35-4d90-b767-7bfd505b441e-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 09:00:42 crc kubenswrapper[4711]: I0123 09:00:42.110195 4711 generic.go:334] "Generic (PLEG): container finished" podID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerID="7893368a901c1c7bbd6f0bca507d95319030d4c068c6b76acf1e471cb7975a0b" exitCode=2 Jan 23 09:00:42 crc kubenswrapper[4711]: I0123 09:00:42.110238 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" event={"ID":"ff4efd5e-ae35-4d90-b767-7bfd505b441e","Type":"ContainerDied","Data":"7893368a901c1c7bbd6f0bca507d95319030d4c068c6b76acf1e471cb7975a0b"} Jan 23 09:00:42 crc kubenswrapper[4711]: I0123 09:00:42.110262 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" event={"ID":"ff4efd5e-ae35-4d90-b767-7bfd505b441e","Type":"ContainerDied","Data":"dec89bcc917b5ab2e849b341a5544d4edc8ed2ca090d8ad838dacf9e416216ed"} Jan 23 09:00:42 crc kubenswrapper[4711]: I0123 09:00:42.110277 4711 scope.go:117] "RemoveContainer" containerID="7893368a901c1c7bbd6f0bca507d95319030d4c068c6b76acf1e471cb7975a0b" Jan 23 09:00:42 crc kubenswrapper[4711]: I0123 09:00:42.110381 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz" Jan 23 09:00:42 crc kubenswrapper[4711]: I0123 09:00:42.141231 4711 scope.go:117] "RemoveContainer" containerID="554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52" Jan 23 09:00:42 crc kubenswrapper[4711]: I0123 09:00:42.143027 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz"] Jan 23 09:00:42 crc kubenswrapper[4711]: I0123 09:00:42.149625 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-delete-h5mxz"] Jan 23 09:00:42 crc kubenswrapper[4711]: I0123 09:00:42.171210 4711 scope.go:117] "RemoveContainer" containerID="7893368a901c1c7bbd6f0bca507d95319030d4c068c6b76acf1e471cb7975a0b" Jan 23 09:00:42 crc kubenswrapper[4711]: E0123 09:00:42.171835 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7893368a901c1c7bbd6f0bca507d95319030d4c068c6b76acf1e471cb7975a0b\": container with ID starting with 7893368a901c1c7bbd6f0bca507d95319030d4c068c6b76acf1e471cb7975a0b not found: ID does not exist" containerID="7893368a901c1c7bbd6f0bca507d95319030d4c068c6b76acf1e471cb7975a0b" Jan 23 09:00:42 crc kubenswrapper[4711]: I0123 09:00:42.171924 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7893368a901c1c7bbd6f0bca507d95319030d4c068c6b76acf1e471cb7975a0b"} err="failed to get container status \"7893368a901c1c7bbd6f0bca507d95319030d4c068c6b76acf1e471cb7975a0b\": rpc error: code = NotFound desc = could not find container \"7893368a901c1c7bbd6f0bca507d95319030d4c068c6b76acf1e471cb7975a0b\": container with ID starting with 7893368a901c1c7bbd6f0bca507d95319030d4c068c6b76acf1e471cb7975a0b not found: ID does not exist" Jan 23 09:00:42 crc kubenswrapper[4711]: I0123 09:00:42.172107 4711 scope.go:117] "RemoveContainer" containerID="554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52" Jan 23 09:00:42 crc kubenswrapper[4711]: E0123 09:00:42.172621 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52\": container with ID starting with 554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52 not found: ID does not exist" containerID="554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52" Jan 23 09:00:42 crc kubenswrapper[4711]: I0123 09:00:42.172662 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52"} err="failed to get container status \"554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52\": rpc error: code = NotFound desc = could not find container \"554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52\": container with ID starting with 554490574eb84317d2044c2c063f402c366a74ca4faffceb8b8117a55b529e52 not found: ID does not exist" Jan 23 09:00:42 crc kubenswrapper[4711]: I0123 09:00:42.474364 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:00:42 crc kubenswrapper[4711]: E0123 09:00:42.474631 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 09:00:43 crc kubenswrapper[4711]: I0123 09:00:43.489146 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" path="/var/lib/kubelet/pods/ff4efd5e-ae35-4d90-b767-7bfd505b441e/volumes" Jan 23 09:00:56 crc kubenswrapper[4711]: I0123 09:00:56.473713 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:00:56 crc kubenswrapper[4711]: E0123 09:00:56.474461 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.147355 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/keystone-cron-29485981-v6rjc"] Jan 23 09:01:00 crc kubenswrapper[4711]: E0123 09:01:00.148048 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.148062 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" Jan 23 09:01:00 crc kubenswrapper[4711]: E0123 09:01:00.148076 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edcec5fe-f4b0-4c3c-8bca-c7f8ca760874" containerName="collect-profiles" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.148082 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="edcec5fe-f4b0-4c3c-8bca-c7f8ca760874" containerName="collect-profiles" Jan 23 09:01:00 crc kubenswrapper[4711]: E0123 09:01:00.148091 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.148097 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" Jan 23 09:01:00 crc kubenswrapper[4711]: E0123 09:01:00.148103 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.148111 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" Jan 23 09:01:00 crc kubenswrapper[4711]: E0123 09:01:00.148127 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.148133 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" Jan 23 09:01:00 crc kubenswrapper[4711]: E0123 09:01:00.148141 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.148147 4711 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.148298 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.148310 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.148317 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="edcec5fe-f4b0-4c3c-8bca-c7f8ca760874" containerName="collect-profiles" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.148325 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.148339 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.148347 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.148960 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-cron-29485981-v6rjc" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.169453 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-cron-29485981-v6rjc"] Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.253602 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qg6lt\" (UniqueName: \"kubernetes.io/projected/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-kube-api-access-qg6lt\") pod \"keystone-cron-29485981-v6rjc\" (UID: \"fd607e4c-3a6a-4236-aa1f-7bae23e4db94\") " pod="nova-kuttl-default/keystone-cron-29485981-v6rjc" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.253902 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-config-data\") pod \"keystone-cron-29485981-v6rjc\" (UID: \"fd607e4c-3a6a-4236-aa1f-7bae23e4db94\") " pod="nova-kuttl-default/keystone-cron-29485981-v6rjc" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.253942 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-fernet-keys\") pod \"keystone-cron-29485981-v6rjc\" (UID: \"fd607e4c-3a6a-4236-aa1f-7bae23e4db94\") " pod="nova-kuttl-default/keystone-cron-29485981-v6rjc" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.254047 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-combined-ca-bundle\") pod \"keystone-cron-29485981-v6rjc\" (UID: \"fd607e4c-3a6a-4236-aa1f-7bae23e4db94\") " pod="nova-kuttl-default/keystone-cron-29485981-v6rjc" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.355149 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qg6lt\" (UniqueName: 
\"kubernetes.io/projected/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-kube-api-access-qg6lt\") pod \"keystone-cron-29485981-v6rjc\" (UID: \"fd607e4c-3a6a-4236-aa1f-7bae23e4db94\") " pod="nova-kuttl-default/keystone-cron-29485981-v6rjc" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.355282 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-config-data\") pod \"keystone-cron-29485981-v6rjc\" (UID: \"fd607e4c-3a6a-4236-aa1f-7bae23e4db94\") " pod="nova-kuttl-default/keystone-cron-29485981-v6rjc" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.355333 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-fernet-keys\") pod \"keystone-cron-29485981-v6rjc\" (UID: \"fd607e4c-3a6a-4236-aa1f-7bae23e4db94\") " pod="nova-kuttl-default/keystone-cron-29485981-v6rjc" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.355370 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-combined-ca-bundle\") pod \"keystone-cron-29485981-v6rjc\" (UID: \"fd607e4c-3a6a-4236-aa1f-7bae23e4db94\") " pod="nova-kuttl-default/keystone-cron-29485981-v6rjc" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.364657 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-combined-ca-bundle\") pod \"keystone-cron-29485981-v6rjc\" (UID: \"fd607e4c-3a6a-4236-aa1f-7bae23e4db94\") " pod="nova-kuttl-default/keystone-cron-29485981-v6rjc" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.365019 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-config-data\") pod \"keystone-cron-29485981-v6rjc\" (UID: \"fd607e4c-3a6a-4236-aa1f-7bae23e4db94\") " pod="nova-kuttl-default/keystone-cron-29485981-v6rjc" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.368850 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-fernet-keys\") pod \"keystone-cron-29485981-v6rjc\" (UID: \"fd607e4c-3a6a-4236-aa1f-7bae23e4db94\") " pod="nova-kuttl-default/keystone-cron-29485981-v6rjc" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.375306 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qg6lt\" (UniqueName: \"kubernetes.io/projected/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-kube-api-access-qg6lt\") pod \"keystone-cron-29485981-v6rjc\" (UID: \"fd607e4c-3a6a-4236-aa1f-7bae23e4db94\") " pod="nova-kuttl-default/keystone-cron-29485981-v6rjc" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.506193 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-cron-29485981-v6rjc" Jan 23 09:01:00 crc kubenswrapper[4711]: I0123 09:01:00.915897 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-cron-29485981-v6rjc"] Jan 23 09:01:01 crc kubenswrapper[4711]: I0123 09:01:01.281693 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-cron-29485981-v6rjc" event={"ID":"fd607e4c-3a6a-4236-aa1f-7bae23e4db94","Type":"ContainerStarted","Data":"c0a4f8fcdafd29d286150f2646b94d572e9942daf706d826c5fdc588c4928960"} Jan 23 09:01:01 crc kubenswrapper[4711]: I0123 09:01:01.282040 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-cron-29485981-v6rjc" event={"ID":"fd607e4c-3a6a-4236-aa1f-7bae23e4db94","Type":"ContainerStarted","Data":"033c25ac7b0eb00bef5cd1bbc246b68603937da238866128224def638fcb8567"} Jan 23 09:01:01 crc kubenswrapper[4711]: I0123 09:01:01.299922 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/keystone-cron-29485981-v6rjc" podStartSLOduration=1.299901584 podStartE2EDuration="1.299901584s" podCreationTimestamp="2026-01-23 09:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 09:01:01.299022722 +0000 UTC m=+2446.871979090" watchObservedRunningTime="2026-01-23 09:01:01.299901584 +0000 UTC m=+2446.872857962" Jan 23 09:01:03 crc kubenswrapper[4711]: I0123 09:01:03.297596 4711 generic.go:334] "Generic (PLEG): container finished" podID="fd607e4c-3a6a-4236-aa1f-7bae23e4db94" containerID="c0a4f8fcdafd29d286150f2646b94d572e9942daf706d826c5fdc588c4928960" exitCode=0 Jan 23 09:01:03 crc kubenswrapper[4711]: I0123 09:01:03.297646 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-cron-29485981-v6rjc" event={"ID":"fd607e4c-3a6a-4236-aa1f-7bae23e4db94","Type":"ContainerDied","Data":"c0a4f8fcdafd29d286150f2646b94d572e9942daf706d826c5fdc588c4928960"} Jan 23 09:01:04 crc kubenswrapper[4711]: I0123 09:01:04.636973 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-cron-29485981-v6rjc" Jan 23 09:01:04 crc kubenswrapper[4711]: I0123 09:01:04.736243 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg6lt\" (UniqueName: \"kubernetes.io/projected/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-kube-api-access-qg6lt\") pod \"fd607e4c-3a6a-4236-aa1f-7bae23e4db94\" (UID: \"fd607e4c-3a6a-4236-aa1f-7bae23e4db94\") " Jan 23 09:01:04 crc kubenswrapper[4711]: I0123 09:01:04.736331 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-fernet-keys\") pod \"fd607e4c-3a6a-4236-aa1f-7bae23e4db94\" (UID: \"fd607e4c-3a6a-4236-aa1f-7bae23e4db94\") " Jan 23 09:01:04 crc kubenswrapper[4711]: I0123 09:01:04.736361 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-combined-ca-bundle\") pod \"fd607e4c-3a6a-4236-aa1f-7bae23e4db94\" (UID: \"fd607e4c-3a6a-4236-aa1f-7bae23e4db94\") " Jan 23 09:01:04 crc kubenswrapper[4711]: I0123 09:01:04.736420 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-config-data\") pod \"fd607e4c-3a6a-4236-aa1f-7bae23e4db94\" (UID: \"fd607e4c-3a6a-4236-aa1f-7bae23e4db94\") " Jan 23 09:01:04 crc kubenswrapper[4711]: I0123 09:01:04.742168 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-kube-api-access-qg6lt" (OuterVolumeSpecName: "kube-api-access-qg6lt") pod "fd607e4c-3a6a-4236-aa1f-7bae23e4db94" (UID: "fd607e4c-3a6a-4236-aa1f-7bae23e4db94"). InnerVolumeSpecName "kube-api-access-qg6lt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 09:01:04 crc kubenswrapper[4711]: I0123 09:01:04.744680 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "fd607e4c-3a6a-4236-aa1f-7bae23e4db94" (UID: "fd607e4c-3a6a-4236-aa1f-7bae23e4db94"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 09:01:04 crc kubenswrapper[4711]: I0123 09:01:04.763762 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd607e4c-3a6a-4236-aa1f-7bae23e4db94" (UID: "fd607e4c-3a6a-4236-aa1f-7bae23e4db94"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 09:01:04 crc kubenswrapper[4711]: I0123 09:01:04.773442 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-config-data" (OuterVolumeSpecName: "config-data") pod "fd607e4c-3a6a-4236-aa1f-7bae23e4db94" (UID: "fd607e4c-3a6a-4236-aa1f-7bae23e4db94"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 23 09:01:04 crc kubenswrapper[4711]: I0123 09:01:04.838294 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg6lt\" (UniqueName: \"kubernetes.io/projected/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-kube-api-access-qg6lt\") on node \"crc\" DevicePath \"\"" Jan 23 09:01:04 crc kubenswrapper[4711]: I0123 09:01:04.838334 4711 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 23 09:01:04 crc kubenswrapper[4711]: I0123 09:01:04.838349 4711 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 23 09:01:04 crc kubenswrapper[4711]: I0123 09:01:04.838360 4711 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd607e4c-3a6a-4236-aa1f-7bae23e4db94-config-data\") on node \"crc\" DevicePath \"\"" Jan 23 09:01:05 crc kubenswrapper[4711]: I0123 09:01:05.314866 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-cron-29485981-v6rjc" event={"ID":"fd607e4c-3a6a-4236-aa1f-7bae23e4db94","Type":"ContainerDied","Data":"033c25ac7b0eb00bef5cd1bbc246b68603937da238866128224def638fcb8567"} Jan 23 09:01:05 crc kubenswrapper[4711]: I0123 09:01:05.314912 4711 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="033c25ac7b0eb00bef5cd1bbc246b68603937da238866128224def638fcb8567" Jan 23 09:01:05 crc kubenswrapper[4711]: I0123 09:01:05.314998 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-cron-29485981-v6rjc" Jan 23 09:01:07 crc kubenswrapper[4711]: I0123 09:01:07.473899 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:01:07 crc kubenswrapper[4711]: E0123 09:01:07.474397 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 09:01:11 crc kubenswrapper[4711]: I0123 09:01:11.581831 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-59dd8b7cbf-gw4s9_a7d6e419-04ce-4f5c-93a9-34d14a8c531a/manager/0.log" Jan 23 09:01:12 crc kubenswrapper[4711]: I0123 09:01:12.025966 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-69cf5d4557-kjjf2_0c4bb18f-fc6b-49ea-a9c3-971c666a935b/manager/0.log" Jan 23 09:01:12 crc kubenswrapper[4711]: I0123 09:01:12.504400 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-b45d7bf98-4pw4h_c6c6b995-fa92-4cf2-87a1-361881e8c284/manager/0.log" Jan 23 09:01:12 crc kubenswrapper[4711]: I0123 09:01:12.955553 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl_c1bb2b11-1e9e-4000-996b-8097bcc3a448/extract/0.log" Jan 23 09:01:13 crc kubenswrapper[4711]: I0123 09:01:13.373279 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8_ee296785-739b-4103-84c5-ab2fe24f3a7c/extract/0.log" Jan 23 09:01:13 crc kubenswrapper[4711]: I0123 09:01:13.793056 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-78fdd796fd-62nhb_e609d803-23cf-4d04-8587-bdd492c4c4bd/manager/0.log" Jan 23 09:01:14 crc kubenswrapper[4711]: I0123 09:01:14.204368 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-594c8c9d5d-sk67x_7c54a5ef-3d58-4010-875b-8b6022692c7e/manager/0.log" Jan 23 09:01:14 crc kubenswrapper[4711]: I0123 09:01:14.603820 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-77d5c5b54f-v7tnr_cb3cde58-59aa-41dc-a4f1-8fadd07dd1ed/manager/0.log" Jan 23 09:01:15 crc kubenswrapper[4711]: I0123 09:01:15.188900 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-54ccf4f85d-vs5n6_467b6d38-a02c-44f9-81bf-3bda90dc4efd/manager/0.log" Jan 23 09:01:15 crc kubenswrapper[4711]: I0123 09:01:15.603279 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-69d6c9f5b8-ghlgc_5ad48aae-ab84-4c93-9d0e-cb4fd24884dd/manager/0.log" Jan 23 09:01:16 crc kubenswrapper[4711]: I0123 09:01:16.096621 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-b8b6d4659-9fsww_f57c2bcd-cd26-420c-a3f9-64b5d4d1a916/manager/0.log" Jan 23 09:01:16 
crc kubenswrapper[4711]: I0123 09:01:16.553561 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-78c6999f6f-8ptr2_effbd1c1-9a1a-4a5c-9955-6a1005746383/manager/0.log" Jan 23 09:01:17 crc kubenswrapper[4711]: I0123 09:01:17.015817 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-c87fff755-bjrrb_bfc822a0-472c-4e41-99c9-35605ebea5c6/manager/0.log" Jan 23 09:01:17 crc kubenswrapper[4711]: I0123 09:01:17.432657 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5d8f59fb49-42kx2_b764e900-9f5f-49e6-b6a8-8ad55007cc54/manager/0.log" Jan 23 09:01:18 crc kubenswrapper[4711]: I0123 09:01:18.286278 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-6b684b99ff-stnvz_c9735021-bb7c-43ac-b95e-a4fd2d26c84a/manager/0.log" Jan 23 09:01:18 crc kubenswrapper[4711]: I0123 09:01:18.731279 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-index-tm9mb_0bbe37a8-f874-4b6b-8f5d-35c1f4a463cc/registry-server/0.log" Jan 23 09:01:19 crc kubenswrapper[4711]: I0123 09:01:19.145415 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7bd9774b6-cmn6r_f1b9f385-b045-407d-a56c-87750c1c5972/manager/0.log" Jan 23 09:01:19 crc kubenswrapper[4711]: I0123 09:01:19.625805 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6b68b8b854q774f_e275a9a3-3a29-498a-bea9-b545730a0301/manager/0.log" Jan 23 09:01:20 crc kubenswrapper[4711]: I0123 09:01:20.414807 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6998f5c585-t278t_72674af2-3b9e-47e6-8417-bee428fe826a/manager/0.log" Jan 23 09:01:21 crc kubenswrapper[4711]: I0123 09:01:21.342236 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-2fdmb_eb73ab49-65f1-446a-960e-035803896d9a/registry-server/0.log" Jan 23 09:01:21 crc kubenswrapper[4711]: I0123 09:01:21.474822 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:01:21 crc kubenswrapper[4711]: E0123 09:01:21.475219 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 09:01:21 crc kubenswrapper[4711]: I0123 09:01:21.743888 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-55db956ddc-plrlz_0b81979e-a44a-40d2-8eff-958e528d95a1/manager/0.log" Jan 23 09:01:22 crc kubenswrapper[4711]: I0123 09:01:22.247052 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5d646b7d76-ctqxt_9eb50a74-06d4-4b43-a0b9-245354b3cde7/manager/0.log" Jan 23 09:01:22 crc kubenswrapper[4711]: I0123 09:01:22.670695 4711 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-hwttv_5726b969-f5cb-4e58-9e8f-92c001f4a7be/operator/0.log" Jan 23 09:01:23 crc kubenswrapper[4711]: I0123 09:01:23.153203 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-547cbdb99f-4lhtg_366f8b54-0e7e-4d75-9c62-d174624512e4/manager/0.log" Jan 23 09:01:23 crc kubenswrapper[4711]: I0123 09:01:23.563443 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-85cd9769bb-qnzz6_793a9ab1-c243-4a56-9463-3be8147bff44/manager/0.log" Jan 23 09:01:24 crc kubenswrapper[4711]: I0123 09:01:24.066575 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-69797bbcbd-6wgwv_a1ec006d-63fd-4fac-b5f5-df222bab8638/manager/0.log" Jan 23 09:01:24 crc kubenswrapper[4711]: I0123 09:01:24.554082 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-5ffb9c6597-lvcpb_458cb03b-3e35-4219-8009-08829d99da25/manager/0.log" Jan 23 09:01:34 crc kubenswrapper[4711]: I0123 09:01:34.475108 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:01:34 crc kubenswrapper[4711]: E0123 09:01:34.476716 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 09:01:46 crc kubenswrapper[4711]: I0123 09:01:46.473842 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:01:46 crc kubenswrapper[4711]: E0123 09:01:46.474613 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 09:01:56 crc kubenswrapper[4711]: I0123 09:01:56.562853 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-zltqz/must-gather-x52ds"] Jan 23 09:01:56 crc kubenswrapper[4711]: E0123 09:01:56.563920 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd607e4c-3a6a-4236-aa1f-7bae23e4db94" containerName="keystone-cron" Jan 23 09:01:56 crc kubenswrapper[4711]: I0123 09:01:56.563933 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd607e4c-3a6a-4236-aa1f-7bae23e4db94" containerName="keystone-cron" Jan 23 09:01:56 crc kubenswrapper[4711]: E0123 09:01:56.563949 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" Jan 23 09:01:56 crc kubenswrapper[4711]: I0123 09:01:56.563956 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" Jan 23 09:01:56 crc kubenswrapper[4711]: E0123 09:01:56.563965 4711 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" Jan 23 09:01:56 crc kubenswrapper[4711]: I0123 09:01:56.563971 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" Jan 23 09:01:56 crc kubenswrapper[4711]: I0123 09:01:56.564143 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" Jan 23 09:01:56 crc kubenswrapper[4711]: I0123 09:01:56.564158 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff4efd5e-ae35-4d90-b767-7bfd505b441e" containerName="nova-manage" Jan 23 09:01:56 crc kubenswrapper[4711]: I0123 09:01:56.564176 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd607e4c-3a6a-4236-aa1f-7bae23e4db94" containerName="keystone-cron" Jan 23 09:01:56 crc kubenswrapper[4711]: I0123 09:01:56.565408 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zltqz/must-gather-x52ds" Jan 23 09:01:56 crc kubenswrapper[4711]: I0123 09:01:56.567859 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-zltqz"/"openshift-service-ca.crt" Jan 23 09:01:56 crc kubenswrapper[4711]: I0123 09:01:56.568173 4711 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-zltqz"/"kube-root-ca.crt" Jan 23 09:01:56 crc kubenswrapper[4711]: I0123 09:01:56.585468 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-zltqz/must-gather-x52ds"] Jan 23 09:01:56 crc kubenswrapper[4711]: I0123 09:01:56.693282 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bz27\" (UniqueName: \"kubernetes.io/projected/b5fda0d5-3233-4592-9a13-39e692ec48c1-kube-api-access-6bz27\") pod \"must-gather-x52ds\" (UID: \"b5fda0d5-3233-4592-9a13-39e692ec48c1\") " pod="openshift-must-gather-zltqz/must-gather-x52ds" Jan 23 09:01:56 crc kubenswrapper[4711]: I0123 09:01:56.693394 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b5fda0d5-3233-4592-9a13-39e692ec48c1-must-gather-output\") pod \"must-gather-x52ds\" (UID: \"b5fda0d5-3233-4592-9a13-39e692ec48c1\") " pod="openshift-must-gather-zltqz/must-gather-x52ds" Jan 23 09:01:56 crc kubenswrapper[4711]: I0123 09:01:56.794943 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b5fda0d5-3233-4592-9a13-39e692ec48c1-must-gather-output\") pod \"must-gather-x52ds\" (UID: \"b5fda0d5-3233-4592-9a13-39e692ec48c1\") " pod="openshift-must-gather-zltqz/must-gather-x52ds" Jan 23 09:01:56 crc kubenswrapper[4711]: I0123 09:01:56.795081 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bz27\" (UniqueName: \"kubernetes.io/projected/b5fda0d5-3233-4592-9a13-39e692ec48c1-kube-api-access-6bz27\") pod \"must-gather-x52ds\" (UID: \"b5fda0d5-3233-4592-9a13-39e692ec48c1\") " pod="openshift-must-gather-zltqz/must-gather-x52ds" Jan 23 09:01:56 crc kubenswrapper[4711]: I0123 09:01:56.795526 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b5fda0d5-3233-4592-9a13-39e692ec48c1-must-gather-output\") pod \"must-gather-x52ds\" (UID: 
\"b5fda0d5-3233-4592-9a13-39e692ec48c1\") " pod="openshift-must-gather-zltqz/must-gather-x52ds" Jan 23 09:01:56 crc kubenswrapper[4711]: I0123 09:01:56.814799 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bz27\" (UniqueName: \"kubernetes.io/projected/b5fda0d5-3233-4592-9a13-39e692ec48c1-kube-api-access-6bz27\") pod \"must-gather-x52ds\" (UID: \"b5fda0d5-3233-4592-9a13-39e692ec48c1\") " pod="openshift-must-gather-zltqz/must-gather-x52ds" Jan 23 09:01:56 crc kubenswrapper[4711]: I0123 09:01:56.889615 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zltqz/must-gather-x52ds" Jan 23 09:01:57 crc kubenswrapper[4711]: I0123 09:01:57.362541 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-zltqz/must-gather-x52ds"] Jan 23 09:01:57 crc kubenswrapper[4711]: I0123 09:01:57.474057 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:01:57 crc kubenswrapper[4711]: E0123 09:01:57.474304 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 09:01:57 crc kubenswrapper[4711]: I0123 09:01:57.787118 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zltqz/must-gather-x52ds" event={"ID":"b5fda0d5-3233-4592-9a13-39e692ec48c1","Type":"ContainerStarted","Data":"96c381f19c1e3e66402b8700e3dfa298c9edf00b6b5b3db3dce09bf56010cb44"} Jan 23 09:02:07 crc kubenswrapper[4711]: I0123 09:02:07.859552 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zltqz/must-gather-x52ds" event={"ID":"b5fda0d5-3233-4592-9a13-39e692ec48c1","Type":"ContainerStarted","Data":"1bca349dde14917b742c37500ce8e4b88802348e8149b8669ed7d2e67eb1247e"} Jan 23 09:02:08 crc kubenswrapper[4711]: I0123 09:02:08.867860 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zltqz/must-gather-x52ds" event={"ID":"b5fda0d5-3233-4592-9a13-39e692ec48c1","Type":"ContainerStarted","Data":"6d7951a0d2da48fa5d38b8a2d17d7ec23240faadc6ecb70e1882c039e0b77fe4"} Jan 23 09:02:08 crc kubenswrapper[4711]: I0123 09:02:08.882910 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-zltqz/must-gather-x52ds" podStartSLOduration=2.6701577480000003 podStartE2EDuration="12.882888789s" podCreationTimestamp="2026-01-23 09:01:56 +0000 UTC" firstStartedPulling="2026-01-23 09:01:57.370431721 +0000 UTC m=+2502.943388079" lastFinishedPulling="2026-01-23 09:02:07.583162762 +0000 UTC m=+2513.156119120" observedRunningTime="2026-01-23 09:02:08.881382293 +0000 UTC m=+2514.454338671" watchObservedRunningTime="2026-01-23 09:02:08.882888789 +0000 UTC m=+2514.455845157" Jan 23 09:02:12 crc kubenswrapper[4711]: I0123 09:02:12.473811 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:02:12 crc kubenswrapper[4711]: E0123 09:02:12.474308 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 09:02:26 crc kubenswrapper[4711]: I0123 09:02:26.474573 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:02:26 crc kubenswrapper[4711]: E0123 09:02:26.475198 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 09:02:40 crc kubenswrapper[4711]: I0123 09:02:40.473138 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:02:40 crc kubenswrapper[4711]: E0123 09:02:40.473734 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 09:02:54 crc kubenswrapper[4711]: I0123 09:02:54.473733 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:02:54 crc kubenswrapper[4711]: E0123 09:02:54.474427 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 09:02:56 crc kubenswrapper[4711]: I0123 09:02:56.053275 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-cell0-6077-account-create-update-dljdv"] Jan 23 09:02:56 crc kubenswrapper[4711]: I0123 09:02:56.061620 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-cell1-db-create-4q85d"] Jan 23 09:02:56 crc kubenswrapper[4711]: I0123 09:02:56.068291 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-cell1-044b-account-create-update-dvckg"] Jan 23 09:02:56 crc kubenswrapper[4711]: I0123 09:02:56.074209 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-cell0-db-create-lh44q"] Jan 23 09:02:56 crc kubenswrapper[4711]: I0123 09:02:56.080620 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-cell0-6077-account-create-update-dljdv"] Jan 23 09:02:56 crc kubenswrapper[4711]: I0123 09:02:56.087942 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-api-db-create-rtqsh"] Jan 23 09:02:56 crc kubenswrapper[4711]: I0123 09:02:56.094090 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-api-30c6-account-create-update-jsrs5"] Jan 23 09:02:56 crc 
kubenswrapper[4711]: I0123 09:02:56.100448 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-api-30c6-account-create-update-jsrs5"] Jan 23 09:02:56 crc kubenswrapper[4711]: I0123 09:02:56.107282 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-api-db-create-rtqsh"] Jan 23 09:02:56 crc kubenswrapper[4711]: I0123 09:02:56.114054 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-cell1-044b-account-create-update-dvckg"] Jan 23 09:02:56 crc kubenswrapper[4711]: I0123 09:02:56.120727 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-cell1-db-create-4q85d"] Jan 23 09:02:56 crc kubenswrapper[4711]: I0123 09:02:56.126451 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-cell0-db-create-lh44q"] Jan 23 09:02:57 crc kubenswrapper[4711]: I0123 09:02:57.518995 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8aaefd64-d6df-4ecb-bdb2-ed135a281f26" path="/var/lib/kubelet/pods/8aaefd64-d6df-4ecb-bdb2-ed135a281f26/volumes" Jan 23 09:02:57 crc kubenswrapper[4711]: I0123 09:02:57.519542 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="979e054b-e3c8-42e2-926f-49d0decb456c" path="/var/lib/kubelet/pods/979e054b-e3c8-42e2-926f-49d0decb456c/volumes" Jan 23 09:02:57 crc kubenswrapper[4711]: I0123 09:02:57.520065 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af839ca6-1c3d-41e0-807c-5154f96201c0" path="/var/lib/kubelet/pods/af839ca6-1c3d-41e0-807c-5154f96201c0/volumes" Jan 23 09:02:57 crc kubenswrapper[4711]: I0123 09:02:57.520593 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b032059c-7845-4107-b676-1e1d66d18d16" path="/var/lib/kubelet/pods/b032059c-7845-4107-b676-1e1d66d18d16/volumes" Jan 23 09:02:57 crc kubenswrapper[4711]: I0123 09:02:57.521502 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5263b1b-5e34-49db-a73a-4e179736aae9" path="/var/lib/kubelet/pods/e5263b1b-5e34-49db-a73a-4e179736aae9/volumes" Jan 23 09:02:57 crc kubenswrapper[4711]: I0123 09:02:57.521994 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efaa3835-9199-4083-b21a-fe0513b1f665" path="/var/lib/kubelet/pods/efaa3835-9199-4083-b21a-fe0513b1f665/volumes" Jan 23 09:03:05 crc kubenswrapper[4711]: I0123 09:03:05.035685 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv"] Jan 23 09:03:05 crc kubenswrapper[4711]: I0123 09:03:05.043408 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-conductor-db-sync-9zdrv"] Jan 23 09:03:05 crc kubenswrapper[4711]: I0123 09:03:05.486498 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="847224a6-d90c-4be4-a4ea-c60e7c6d9986" path="/var/lib/kubelet/pods/847224a6-d90c-4be4-a4ea-c60e7c6d9986/volumes" Jan 23 09:03:08 crc kubenswrapper[4711]: I0123 09:03:08.474079 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:03:08 crc kubenswrapper[4711]: E0123 09:03:08.474748 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 09:03:14 crc kubenswrapper[4711]: I0123 09:03:14.694020 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-59dd8b7cbf-gw4s9_a7d6e419-04ce-4f5c-93a9-34d14a8c531a/manager/0.log" Jan 23 09:03:14 crc kubenswrapper[4711]: I0123 09:03:14.859523 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-69cf5d4557-kjjf2_0c4bb18f-fc6b-49ea-a9c3-971c666a935b/manager/0.log" Jan 23 09:03:14 crc kubenswrapper[4711]: I0123 09:03:14.940830 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-b45d7bf98-4pw4h_c6c6b995-fa92-4cf2-87a1-361881e8c284/manager/0.log" Jan 23 09:03:15 crc kubenswrapper[4711]: I0123 09:03:15.081627 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl_c1bb2b11-1e9e-4000-996b-8097bcc3a448/util/0.log" Jan 23 09:03:15 crc kubenswrapper[4711]: I0123 09:03:15.272208 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl_c1bb2b11-1e9e-4000-996b-8097bcc3a448/util/0.log" Jan 23 09:03:15 crc kubenswrapper[4711]: I0123 09:03:15.273444 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl_c1bb2b11-1e9e-4000-996b-8097bcc3a448/pull/0.log" Jan 23 09:03:15 crc kubenswrapper[4711]: I0123 09:03:15.285375 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl_c1bb2b11-1e9e-4000-996b-8097bcc3a448/pull/0.log" Jan 23 09:03:15 crc kubenswrapper[4711]: I0123 09:03:15.461435 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl_c1bb2b11-1e9e-4000-996b-8097bcc3a448/util/0.log" Jan 23 09:03:15 crc kubenswrapper[4711]: I0123 09:03:15.465818 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl_c1bb2b11-1e9e-4000-996b-8097bcc3a448/extract/0.log" Jan 23 09:03:15 crc kubenswrapper[4711]: I0123 09:03:15.468324 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e42aa4e3ad74eb2297608447df0c603c94fadffad270ea8e44fbb41e17m2mfl_c1bb2b11-1e9e-4000-996b-8097bcc3a448/pull/0.log" Jan 23 09:03:15 crc kubenswrapper[4711]: I0123 09:03:15.647660 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8_ee296785-739b-4103-84c5-ab2fe24f3a7c/util/0.log" Jan 23 09:03:15 crc kubenswrapper[4711]: I0123 09:03:15.828416 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8_ee296785-739b-4103-84c5-ab2fe24f3a7c/util/0.log" Jan 23 09:03:15 crc kubenswrapper[4711]: I0123 09:03:15.848038 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8_ee296785-739b-4103-84c5-ab2fe24f3a7c/pull/0.log" Jan 23 09:03:15 crc kubenswrapper[4711]: I0123 09:03:15.860167 4711 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8_ee296785-739b-4103-84c5-ab2fe24f3a7c/pull/0.log" Jan 23 09:03:16 crc kubenswrapper[4711]: I0123 09:03:16.031764 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8_ee296785-739b-4103-84c5-ab2fe24f3a7c/util/0.log" Jan 23 09:03:16 crc kubenswrapper[4711]: I0123 09:03:16.051042 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8_ee296785-739b-4103-84c5-ab2fe24f3a7c/pull/0.log" Jan 23 09:03:16 crc kubenswrapper[4711]: I0123 09:03:16.074377 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fe2ef54ef9a19c58ac61dd920bb570d0ddf247ed2c463728675e9970ea27dw8_ee296785-739b-4103-84c5-ab2fe24f3a7c/extract/0.log" Jan 23 09:03:16 crc kubenswrapper[4711]: I0123 09:03:16.207363 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-78fdd796fd-62nhb_e609d803-23cf-4d04-8587-bdd492c4c4bd/manager/0.log" Jan 23 09:03:16 crc kubenswrapper[4711]: I0123 09:03:16.235244 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-594c8c9d5d-sk67x_7c54a5ef-3d58-4010-875b-8b6022692c7e/manager/0.log" Jan 23 09:03:16 crc kubenswrapper[4711]: I0123 09:03:16.385485 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-77d5c5b54f-v7tnr_cb3cde58-59aa-41dc-a4f1-8fadd07dd1ed/manager/0.log" Jan 23 09:03:16 crc kubenswrapper[4711]: I0123 09:03:16.495291 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-54ccf4f85d-vs5n6_467b6d38-a02c-44f9-81bf-3bda90dc4efd/manager/0.log" Jan 23 09:03:16 crc kubenswrapper[4711]: I0123 09:03:16.584044 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-69d6c9f5b8-ghlgc_5ad48aae-ab84-4c93-9d0e-cb4fd24884dd/manager/0.log" Jan 23 09:03:16 crc kubenswrapper[4711]: I0123 09:03:16.720633 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-b8b6d4659-9fsww_f57c2bcd-cd26-420c-a3f9-64b5d4d1a916/manager/0.log" Jan 23 09:03:16 crc kubenswrapper[4711]: I0123 09:03:16.811287 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-78c6999f6f-8ptr2_effbd1c1-9a1a-4a5c-9955-6a1005746383/manager/0.log" Jan 23 09:03:16 crc kubenswrapper[4711]: I0123 09:03:16.922896 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-c87fff755-bjrrb_bfc822a0-472c-4e41-99c9-35605ebea5c6/manager/0.log" Jan 23 09:03:16 crc kubenswrapper[4711]: I0123 09:03:16.998738 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5d8f59fb49-42kx2_b764e900-9f5f-49e6-b6a8-8ad55007cc54/manager/0.log" Jan 23 09:03:17 crc kubenswrapper[4711]: I0123 09:03:17.232888 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-index-tm9mb_0bbe37a8-f874-4b6b-8f5d-35c1f4a463cc/registry-server/0.log" Jan 23 09:03:17 crc kubenswrapper[4711]: I0123 09:03:17.416842 4711 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7bd9774b6-cmn6r_f1b9f385-b045-407d-a56c-87750c1c5972/manager/0.log" Jan 23 09:03:17 crc kubenswrapper[4711]: I0123 09:03:17.485297 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-6b684b99ff-stnvz_c9735021-bb7c-43ac-b95e-a4fd2d26c84a/manager/0.log" Jan 23 09:03:17 crc kubenswrapper[4711]: I0123 09:03:17.557072 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6b68b8b854q774f_e275a9a3-3a29-498a-bea9-b545730a0301/manager/0.log" Jan 23 09:03:17 crc kubenswrapper[4711]: I0123 09:03:17.813466 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-2fdmb_eb73ab49-65f1-446a-960e-035803896d9a/registry-server/0.log" Jan 23 09:03:17 crc kubenswrapper[4711]: I0123 09:03:17.901012 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6998f5c585-t278t_72674af2-3b9e-47e6-8417-bee428fe826a/manager/0.log" Jan 23 09:03:17 crc kubenswrapper[4711]: I0123 09:03:17.927442 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-55db956ddc-plrlz_0b81979e-a44a-40d2-8eff-958e528d95a1/manager/0.log" Jan 23 09:03:18 crc kubenswrapper[4711]: I0123 09:03:18.069525 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5d646b7d76-ctqxt_9eb50a74-06d4-4b43-a0b9-245354b3cde7/manager/0.log" Jan 23 09:03:18 crc kubenswrapper[4711]: I0123 09:03:18.100685 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-hwttv_5726b969-f5cb-4e58-9e8f-92c001f4a7be/operator/0.log" Jan 23 09:03:18 crc kubenswrapper[4711]: I0123 09:03:18.259058 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-547cbdb99f-4lhtg_366f8b54-0e7e-4d75-9c62-d174624512e4/manager/0.log" Jan 23 09:03:18 crc kubenswrapper[4711]: I0123 09:03:18.502039 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-85cd9769bb-qnzz6_793a9ab1-c243-4a56-9463-3be8147bff44/manager/0.log" Jan 23 09:03:18 crc kubenswrapper[4711]: I0123 09:03:18.678113 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-69797bbcbd-6wgwv_a1ec006d-63fd-4fac-b5f5-df222bab8638/manager/0.log" Jan 23 09:03:18 crc kubenswrapper[4711]: I0123 09:03:18.723006 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-5ffb9c6597-lvcpb_458cb03b-3e35-4219-8009-08829d99da25/manager/0.log" Jan 23 09:03:22 crc kubenswrapper[4711]: I0123 09:03:22.473658 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:03:22 crc kubenswrapper[4711]: E0123 09:03:22.474130 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" 
podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 09:03:22 crc kubenswrapper[4711]: I0123 09:03:22.609102 4711 scope.go:117] "RemoveContainer" containerID="33dc4c76a175b75cec7c2a453f73a1fe297bf9e6ce5ea0498a73028452d12079" Jan 23 09:03:22 crc kubenswrapper[4711]: I0123 09:03:22.631319 4711 scope.go:117] "RemoveContainer" containerID="342a94288bda567d38e56278eae18980f33e5376cfd98dc1ff7ba4bbbe7999f7" Jan 23 09:03:22 crc kubenswrapper[4711]: I0123 09:03:22.666707 4711 scope.go:117] "RemoveContainer" containerID="d6d6e6835d273b23e13d45475d3dd64ccbabb1cb78c758ca7934bc9df32bb17e" Jan 23 09:03:22 crc kubenswrapper[4711]: I0123 09:03:22.695901 4711 scope.go:117] "RemoveContainer" containerID="830f6de51408cb657d26379fb5bed5e9675ccd99efcc4151de795194d1d88c8e" Jan 23 09:03:22 crc kubenswrapper[4711]: I0123 09:03:22.732586 4711 scope.go:117] "RemoveContainer" containerID="6bb3d22d703dd9c6bbba85dda7c89041ba903a2bccfce80525e246e5664ca5d0" Jan 23 09:03:22 crc kubenswrapper[4711]: I0123 09:03:22.811896 4711 scope.go:117] "RemoveContainer" containerID="f25c7e1f0ac7bcd46d1718f8839fd86e10c37c765e437ee25c5d4f5ab9988c27" Jan 23 09:03:22 crc kubenswrapper[4711]: I0123 09:03:22.830813 4711 scope.go:117] "RemoveContainer" containerID="ae111dc8f9f752e37b2bd0eaefd038cd79f089329a361c8c2bef32721e850145" Jan 23 09:03:23 crc kubenswrapper[4711]: I0123 09:03:23.026279 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7"] Jan 23 09:03:23 crc kubenswrapper[4711]: I0123 09:03:23.032443 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-conductor-db-sync-82pn7"] Jan 23 09:03:23 crc kubenswrapper[4711]: I0123 09:03:23.483560 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee1bec46-a27c-4fac-b0c7-71eb3671700e" path="/var/lib/kubelet/pods/ee1bec46-a27c-4fac-b0c7-71eb3671700e/volumes" Jan 23 09:03:24 crc kubenswrapper[4711]: I0123 09:03:24.025691 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc"] Jan 23 09:03:24 crc kubenswrapper[4711]: I0123 09:03:24.031096 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell0-cell-mapping-4nfnc"] Jan 23 09:03:25 crc kubenswrapper[4711]: I0123 09:03:25.483767 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e42940a-1607-4217-b21e-789504a59b2d" path="/var/lib/kubelet/pods/4e42940a-1607-4217-b21e-789504a59b2d/volumes" Jan 23 09:03:35 crc kubenswrapper[4711]: I0123 09:03:35.479463 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:03:35 crc kubenswrapper[4711]: E0123 09:03:35.480361 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 09:03:36 crc kubenswrapper[4711]: I0123 09:03:36.602965 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-5mwhk_fd4bd624-70bb-4602-a4f3-6824c59f90a4/control-plane-machine-set-operator/0.log" Jan 23 09:03:36 crc kubenswrapper[4711]: I0123 09:03:36.764563 4711 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fzkjs_9afbdb97-d93a-494f-8ad0-23179afbee6d/kube-rbac-proxy/0.log" Jan 23 09:03:36 crc kubenswrapper[4711]: I0123 09:03:36.825740 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fzkjs_9afbdb97-d93a-494f-8ad0-23179afbee6d/machine-api-operator/0.log" Jan 23 09:03:42 crc kubenswrapper[4711]: I0123 09:03:42.043933 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc"] Jan 23 09:03:42 crc kubenswrapper[4711]: I0123 09:03:42.052630 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-mapping-2mtcc"] Jan 23 09:03:43 crc kubenswrapper[4711]: I0123 09:03:43.482758 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5518fb5f-7762-4beb-b66e-c5d463c3a672" path="/var/lib/kubelet/pods/5518fb5f-7762-4beb-b66e-c5d463c3a672/volumes" Jan 23 09:03:44 crc kubenswrapper[4711]: I0123 09:03:44.111731 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mj9n8"] Jan 23 09:03:44 crc kubenswrapper[4711]: I0123 09:03:44.114022 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mj9n8" Jan 23 09:03:44 crc kubenswrapper[4711]: I0123 09:03:44.130771 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nljzl\" (UniqueName: \"kubernetes.io/projected/873746af-5cbc-4e4f-8a22-71fc0d57cded-kube-api-access-nljzl\") pod \"certified-operators-mj9n8\" (UID: \"873746af-5cbc-4e4f-8a22-71fc0d57cded\") " pod="openshift-marketplace/certified-operators-mj9n8" Jan 23 09:03:44 crc kubenswrapper[4711]: I0123 09:03:44.131030 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/873746af-5cbc-4e4f-8a22-71fc0d57cded-utilities\") pod \"certified-operators-mj9n8\" (UID: \"873746af-5cbc-4e4f-8a22-71fc0d57cded\") " pod="openshift-marketplace/certified-operators-mj9n8" Jan 23 09:03:44 crc kubenswrapper[4711]: I0123 09:03:44.131160 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/873746af-5cbc-4e4f-8a22-71fc0d57cded-catalog-content\") pod \"certified-operators-mj9n8\" (UID: \"873746af-5cbc-4e4f-8a22-71fc0d57cded\") " pod="openshift-marketplace/certified-operators-mj9n8" Jan 23 09:03:44 crc kubenswrapper[4711]: I0123 09:03:44.131816 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mj9n8"] Jan 23 09:03:44 crc kubenswrapper[4711]: I0123 09:03:44.232702 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/873746af-5cbc-4e4f-8a22-71fc0d57cded-utilities\") pod \"certified-operators-mj9n8\" (UID: \"873746af-5cbc-4e4f-8a22-71fc0d57cded\") " pod="openshift-marketplace/certified-operators-mj9n8" Jan 23 09:03:44 crc kubenswrapper[4711]: I0123 09:03:44.233133 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/873746af-5cbc-4e4f-8a22-71fc0d57cded-catalog-content\") pod \"certified-operators-mj9n8\" (UID: \"873746af-5cbc-4e4f-8a22-71fc0d57cded\") " 
pod="openshift-marketplace/certified-operators-mj9n8" Jan 23 09:03:44 crc kubenswrapper[4711]: I0123 09:03:44.233273 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nljzl\" (UniqueName: \"kubernetes.io/projected/873746af-5cbc-4e4f-8a22-71fc0d57cded-kube-api-access-nljzl\") pod \"certified-operators-mj9n8\" (UID: \"873746af-5cbc-4e4f-8a22-71fc0d57cded\") " pod="openshift-marketplace/certified-operators-mj9n8" Jan 23 09:03:44 crc kubenswrapper[4711]: I0123 09:03:44.233975 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/873746af-5cbc-4e4f-8a22-71fc0d57cded-catalog-content\") pod \"certified-operators-mj9n8\" (UID: \"873746af-5cbc-4e4f-8a22-71fc0d57cded\") " pod="openshift-marketplace/certified-operators-mj9n8" Jan 23 09:03:44 crc kubenswrapper[4711]: I0123 09:03:44.234320 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/873746af-5cbc-4e4f-8a22-71fc0d57cded-utilities\") pod \"certified-operators-mj9n8\" (UID: \"873746af-5cbc-4e4f-8a22-71fc0d57cded\") " pod="openshift-marketplace/certified-operators-mj9n8" Jan 23 09:03:44 crc kubenswrapper[4711]: I0123 09:03:44.253725 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nljzl\" (UniqueName: \"kubernetes.io/projected/873746af-5cbc-4e4f-8a22-71fc0d57cded-kube-api-access-nljzl\") pod \"certified-operators-mj9n8\" (UID: \"873746af-5cbc-4e4f-8a22-71fc0d57cded\") " pod="openshift-marketplace/certified-operators-mj9n8" Jan 23 09:03:44 crc kubenswrapper[4711]: I0123 09:03:44.435043 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mj9n8" Jan 23 09:03:44 crc kubenswrapper[4711]: I0123 09:03:44.887533 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mj9n8"] Jan 23 09:03:45 crc kubenswrapper[4711]: I0123 09:03:45.616917 4711 generic.go:334] "Generic (PLEG): container finished" podID="873746af-5cbc-4e4f-8a22-71fc0d57cded" containerID="c8dedfcf0df20fbd7e544c9b9369d0831711fe083ae6901558a75dfff37a2fed" exitCode=0 Jan 23 09:03:45 crc kubenswrapper[4711]: I0123 09:03:45.616965 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mj9n8" event={"ID":"873746af-5cbc-4e4f-8a22-71fc0d57cded","Type":"ContainerDied","Data":"c8dedfcf0df20fbd7e544c9b9369d0831711fe083ae6901558a75dfff37a2fed"} Jan 23 09:03:45 crc kubenswrapper[4711]: I0123 09:03:45.617278 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mj9n8" event={"ID":"873746af-5cbc-4e4f-8a22-71fc0d57cded","Type":"ContainerStarted","Data":"ad5d71fbff26d4c572aec13550bdd1f20cfdb0c48fd688bf2614e043ad1ecd63"} Jan 23 09:03:45 crc kubenswrapper[4711]: I0123 09:03:45.619830 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 23 09:03:47 crc kubenswrapper[4711]: I0123 09:03:47.473714 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:03:47 crc kubenswrapper[4711]: E0123 09:03:47.475047 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Jan 23 09:03:44 crc kubenswrapper[4711]: I0123 09:03:44.435043 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mj9n8"
Jan 23 09:03:44 crc kubenswrapper[4711]: I0123 09:03:44.887533 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mj9n8"]
Jan 23 09:03:45 crc kubenswrapper[4711]: I0123 09:03:45.616917 4711 generic.go:334] "Generic (PLEG): container finished" podID="873746af-5cbc-4e4f-8a22-71fc0d57cded" containerID="c8dedfcf0df20fbd7e544c9b9369d0831711fe083ae6901558a75dfff37a2fed" exitCode=0
Jan 23 09:03:45 crc kubenswrapper[4711]: I0123 09:03:45.616965 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mj9n8" event={"ID":"873746af-5cbc-4e4f-8a22-71fc0d57cded","Type":"ContainerDied","Data":"c8dedfcf0df20fbd7e544c9b9369d0831711fe083ae6901558a75dfff37a2fed"}
Jan 23 09:03:45 crc kubenswrapper[4711]: I0123 09:03:45.617278 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mj9n8" event={"ID":"873746af-5cbc-4e4f-8a22-71fc0d57cded","Type":"ContainerStarted","Data":"ad5d71fbff26d4c572aec13550bdd1f20cfdb0c48fd688bf2614e043ad1ecd63"}
Jan 23 09:03:45 crc kubenswrapper[4711]: I0123 09:03:45.619830 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 23 09:03:47 crc kubenswrapper[4711]: I0123 09:03:47.473714 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7"
Jan 23 09:03:47 crc kubenswrapper[4711]: E0123 09:03:47.475047 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736"
Jan 23 09:03:49 crc kubenswrapper[4711]: I0123 09:03:49.372761 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-8m4r4_2a47eec5-3714-4461-841a-d47b62502c91/cert-manager-controller/0.log"
Jan 23 09:03:49 crc kubenswrapper[4711]: I0123 09:03:49.530200 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-zsgc9_de49b0b5-98ed-4f89-bf42-7d22260d8bb4/cert-manager-cainjector/0.log"
Jan 23 09:03:49 crc kubenswrapper[4711]: I0123 09:03:49.582168 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-t9x8z_f4626f45-a992-460a-833a-db30b2e83041/cert-manager-webhook/0.log"
Jan 23 09:03:49 crc kubenswrapper[4711]: I0123 09:03:49.657527 4711 generic.go:334] "Generic (PLEG): container finished" podID="873746af-5cbc-4e4f-8a22-71fc0d57cded" containerID="424d361e9880462c9b0a40c0945bf379d1cb89c73637e126232b6860cb71b97b" exitCode=0
Jan 23 09:03:49 crc kubenswrapper[4711]: I0123 09:03:49.657581 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mj9n8" event={"ID":"873746af-5cbc-4e4f-8a22-71fc0d57cded","Type":"ContainerDied","Data":"424d361e9880462c9b0a40c0945bf379d1cb89c73637e126232b6860cb71b97b"}
Jan 23 09:03:51 crc kubenswrapper[4711]: I0123 09:03:51.679875 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mj9n8" event={"ID":"873746af-5cbc-4e4f-8a22-71fc0d57cded","Type":"ContainerStarted","Data":"a1aedbd82993ef08b227873769b98ef3ce1f2847e8f799c78c59250277d33c37"}
Jan 23 09:03:51 crc kubenswrapper[4711]: I0123 09:03:51.704399 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mj9n8" podStartSLOduration=2.746474167 podStartE2EDuration="7.704378554s" podCreationTimestamp="2026-01-23 09:03:44 +0000 UTC" firstStartedPulling="2026-01-23 09:03:45.619580363 +0000 UTC m=+2611.192536731" lastFinishedPulling="2026-01-23 09:03:50.57748475 +0000 UTC m=+2616.150441118" observedRunningTime="2026-01-23 09:03:51.699428172 +0000 UTC m=+2617.272384560" watchObservedRunningTime="2026-01-23 09:03:51.704378554 +0000 UTC m=+2617.277334922"
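
[Editor's note] The "Observed pod startup duration" entry above carries its own arithmetic: podStartSLOduration is the end-to-end startup time with the image-pull window subtracted. Using the monotonic (m=+...) offsets the tracker itself prints, the numbers reconcile exactly; the check below assumes only that reading of the fields.

```python
# Checking the certified-operators-mj9n8 startup entry above. The field
# reading (SLO duration = end-to-end minus image-pull window) is my
# interpretation of the kubelet's pod_startup_latency_tracker output.
e2e = 7.704378554             # podStartE2EDuration
first_pull = 2611.192536731   # firstStartedPulling, monotonic m=+ offset
last_pull = 2616.150441118    # lastFinishedPulling, monotonic m=+ offset

slo = e2e - (last_pull - first_pull)
print(round(slo, 9))          # 2.746474167 == podStartSLOduration above
```
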
Jan 23 09:03:54 crc kubenswrapper[4711]: I0123 09:03:54.435807 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mj9n8"
Jan 23 09:03:54 crc kubenswrapper[4711]: I0123 09:03:54.436985 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mj9n8"
Jan 23 09:03:54 crc kubenswrapper[4711]: I0123 09:03:54.508434 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mj9n8"
Jan 23 09:04:01 crc kubenswrapper[4711]: I0123 09:04:01.473817 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7"
Jan 23 09:04:01 crc kubenswrapper[4711]: E0123 09:04:01.475692 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736"
Jan 23 09:04:01 crc kubenswrapper[4711]: I0123 09:04:01.629070 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-r5vd9_27dd60bd-f0ca-4e99-b5ff-70d34f58cf63/nmstate-console-plugin/0.log"
Jan 23 09:04:01 crc kubenswrapper[4711]: I0123 09:04:01.751646 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-htpp8_2e3923e6-c7fb-4a8a-a621-86a8799f4525/nmstate-handler/0.log"
Jan 23 09:04:01 crc kubenswrapper[4711]: I0123 09:04:01.846136 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-nbz56_56aed6bb-34ef-4b46-a0e8-a3da8931d069/kube-rbac-proxy/0.log"
Jan 23 09:04:01 crc kubenswrapper[4711]: I0123 09:04:01.934384 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-nbz56_56aed6bb-34ef-4b46-a0e8-a3da8931d069/nmstate-metrics/0.log"
Jan 23 09:04:02 crc kubenswrapper[4711]: I0123 09:04:02.013441 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-k54qf_6697496e-11e9-4b32-8e2f-d485235b0d8a/nmstate-operator/0.log"
Jan 23 09:04:02 crc kubenswrapper[4711]: I0123 09:04:02.132584 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-bzrxp_e200b62d-4a7a-4726-9650-3ac95e53ba0d/nmstate-webhook/0.log"
Jan 23 09:04:04 crc kubenswrapper[4711]: I0123 09:04:04.480734 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mj9n8"
Jan 23 09:04:04 crc kubenswrapper[4711]: I0123 09:04:04.536119 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mj9n8"]
Jan 23 09:04:04 crc kubenswrapper[4711]: I0123 09:04:04.788923 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mj9n8" podUID="873746af-5cbc-4e4f-8a22-71fc0d57cded" containerName="registry-server" containerID="cri-o://a1aedbd82993ef08b227873769b98ef3ce1f2847e8f799c78c59250277d33c37" gracePeriod=2
Jan 23 09:04:06 crc kubenswrapper[4711]: I0123 09:04:06.804666 4711 generic.go:334] "Generic (PLEG): container finished" podID="873746af-5cbc-4e4f-8a22-71fc0d57cded" containerID="a1aedbd82993ef08b227873769b98ef3ce1f2847e8f799c78c59250277d33c37" exitCode=0
Jan 23 09:04:06 crc kubenswrapper[4711]: I0123 09:04:06.804711 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mj9n8" event={"ID":"873746af-5cbc-4e4f-8a22-71fc0d57cded","Type":"ContainerDied","Data":"a1aedbd82993ef08b227873769b98ef3ce1f2847e8f799c78c59250277d33c37"}
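
[Editor's note] The PLEG entries above are the record of container state changes in this capture: each "Generic (PLEG): container finished" line is followed by a "SyncLoop (PLEG): event for pod" line whose event={...} payload names the container and the transition (ContainerStarted / ContainerDied). Folding them into a per-pod timeline is a one-pass job; the sketch below quotes the event names from the log, while the regex and dict layout are mine.

```python
import json
import re

# One-pass fold of "SyncLoop (PLEG): event for pod" entries, like those
# above, into a per-podID event list. The event={...} payload is valid
# JSON once any Go-style \" escaping elsewhere on the line is undone.
EVENT_RE = re.compile(r'event=\{(?P<body>.*?)\}')

def pleg_timeline(lines):
    timeline = {}
    for line in lines:
        if '"SyncLoop (PLEG): event for pod"' not in line:
            continue
        m = EVENT_RE.search(line.replace('\\"', '"'))
        if not m:
            continue
        ev = json.loads('{' + m.group('body') + '}')
        timeline.setdefault(ev['ID'], []).append((ev['Type'], ev['Data']))
    return timeline
```
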
Need to start a new one" pod="openshift-marketplace/certified-operators-mj9n8" Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.313843 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nljzl\" (UniqueName: \"kubernetes.io/projected/873746af-5cbc-4e4f-8a22-71fc0d57cded-kube-api-access-nljzl\") pod \"873746af-5cbc-4e4f-8a22-71fc0d57cded\" (UID: \"873746af-5cbc-4e4f-8a22-71fc0d57cded\") " Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.314070 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/873746af-5cbc-4e4f-8a22-71fc0d57cded-catalog-content\") pod \"873746af-5cbc-4e4f-8a22-71fc0d57cded\" (UID: \"873746af-5cbc-4e4f-8a22-71fc0d57cded\") " Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.314151 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/873746af-5cbc-4e4f-8a22-71fc0d57cded-utilities\") pod \"873746af-5cbc-4e4f-8a22-71fc0d57cded\" (UID: \"873746af-5cbc-4e4f-8a22-71fc0d57cded\") " Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.315108 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/873746af-5cbc-4e4f-8a22-71fc0d57cded-utilities" (OuterVolumeSpecName: "utilities") pod "873746af-5cbc-4e4f-8a22-71fc0d57cded" (UID: "873746af-5cbc-4e4f-8a22-71fc0d57cded"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.324685 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/873746af-5cbc-4e4f-8a22-71fc0d57cded-kube-api-access-nljzl" (OuterVolumeSpecName: "kube-api-access-nljzl") pod "873746af-5cbc-4e4f-8a22-71fc0d57cded" (UID: "873746af-5cbc-4e4f-8a22-71fc0d57cded"). InnerVolumeSpecName "kube-api-access-nljzl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.365701 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vkcn9"] Jan 23 09:04:07 crc kubenswrapper[4711]: E0123 09:04:07.366089 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="873746af-5cbc-4e4f-8a22-71fc0d57cded" containerName="extract-utilities" Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.366104 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="873746af-5cbc-4e4f-8a22-71fc0d57cded" containerName="extract-utilities" Jan 23 09:04:07 crc kubenswrapper[4711]: E0123 09:04:07.366122 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="873746af-5cbc-4e4f-8a22-71fc0d57cded" containerName="registry-server" Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.366128 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="873746af-5cbc-4e4f-8a22-71fc0d57cded" containerName="registry-server" Jan 23 09:04:07 crc kubenswrapper[4711]: E0123 09:04:07.366142 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="873746af-5cbc-4e4f-8a22-71fc0d57cded" containerName="extract-content" Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.366149 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="873746af-5cbc-4e4f-8a22-71fc0d57cded" containerName="extract-content" Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.366306 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="873746af-5cbc-4e4f-8a22-71fc0d57cded" containerName="registry-server" Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.367446 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vkcn9" Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.374113 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/873746af-5cbc-4e4f-8a22-71fc0d57cded-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "873746af-5cbc-4e4f-8a22-71fc0d57cded" (UID: "873746af-5cbc-4e4f-8a22-71fc0d57cded"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.383648 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vkcn9"] Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.415638 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/873746af-5cbc-4e4f-8a22-71fc0d57cded-utilities\") on node \"crc\" DevicePath \"\"" Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.415675 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nljzl\" (UniqueName: \"kubernetes.io/projected/873746af-5cbc-4e4f-8a22-71fc0d57cded-kube-api-access-nljzl\") on node \"crc\" DevicePath \"\"" Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.415687 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/873746af-5cbc-4e4f-8a22-71fc0d57cded-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.520387 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlv75\" (UniqueName: \"kubernetes.io/projected/e844965e-09de-46d8-975d-79a12eb959c3-kube-api-access-rlv75\") pod \"redhat-operators-vkcn9\" (UID: \"e844965e-09de-46d8-975d-79a12eb959c3\") " pod="openshift-marketplace/redhat-operators-vkcn9" Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.520476 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e844965e-09de-46d8-975d-79a12eb959c3-utilities\") pod \"redhat-operators-vkcn9\" (UID: \"e844965e-09de-46d8-975d-79a12eb959c3\") " pod="openshift-marketplace/redhat-operators-vkcn9" Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.520561 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e844965e-09de-46d8-975d-79a12eb959c3-catalog-content\") pod \"redhat-operators-vkcn9\" (UID: \"e844965e-09de-46d8-975d-79a12eb959c3\") " pod="openshift-marketplace/redhat-operators-vkcn9" Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.621630 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e844965e-09de-46d8-975d-79a12eb959c3-catalog-content\") pod \"redhat-operators-vkcn9\" (UID: \"e844965e-09de-46d8-975d-79a12eb959c3\") " pod="openshift-marketplace/redhat-operators-vkcn9" Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.622287 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlv75\" (UniqueName: \"kubernetes.io/projected/e844965e-09de-46d8-975d-79a12eb959c3-kube-api-access-rlv75\") pod \"redhat-operators-vkcn9\" (UID: \"e844965e-09de-46d8-975d-79a12eb959c3\") " pod="openshift-marketplace/redhat-operators-vkcn9" Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.622456 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e844965e-09de-46d8-975d-79a12eb959c3-utilities\") pod \"redhat-operators-vkcn9\" (UID: \"e844965e-09de-46d8-975d-79a12eb959c3\") " pod="openshift-marketplace/redhat-operators-vkcn9" Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.622173 4711 operation_generator.go:637] "MountVolume.SetUp 
Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.622757 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e844965e-09de-46d8-975d-79a12eb959c3-utilities\") pod \"redhat-operators-vkcn9\" (UID: \"e844965e-09de-46d8-975d-79a12eb959c3\") " pod="openshift-marketplace/redhat-operators-vkcn9"
Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.642236 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlv75\" (UniqueName: \"kubernetes.io/projected/e844965e-09de-46d8-975d-79a12eb959c3-kube-api-access-rlv75\") pod \"redhat-operators-vkcn9\" (UID: \"e844965e-09de-46d8-975d-79a12eb959c3\") " pod="openshift-marketplace/redhat-operators-vkcn9"
Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.697206 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vkcn9"
Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.825729 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mj9n8" event={"ID":"873746af-5cbc-4e4f-8a22-71fc0d57cded","Type":"ContainerDied","Data":"ad5d71fbff26d4c572aec13550bdd1f20cfdb0c48fd688bf2614e043ad1ecd63"}
Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.825783 4711 scope.go:117] "RemoveContainer" containerID="a1aedbd82993ef08b227873769b98ef3ce1f2847e8f799c78c59250277d33c37"
Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.825868 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mj9n8"
Need to start a new one" pod="openshift-marketplace/certified-operators-mj9n8" Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.853322 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mj9n8"] Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.865426 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mj9n8"] Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.868584 4711 scope.go:117] "RemoveContainer" containerID="424d361e9880462c9b0a40c0945bf379d1cb89c73637e126232b6860cb71b97b" Jan 23 09:04:07 crc kubenswrapper[4711]: I0123 09:04:07.926842 4711 scope.go:117] "RemoveContainer" containerID="c8dedfcf0df20fbd7e544c9b9369d0831711fe083ae6901558a75dfff37a2fed" Jan 23 09:04:08 crc kubenswrapper[4711]: I0123 09:04:08.199878 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vkcn9"] Jan 23 09:04:08 crc kubenswrapper[4711]: I0123 09:04:08.836255 4711 generic.go:334] "Generic (PLEG): container finished" podID="e844965e-09de-46d8-975d-79a12eb959c3" containerID="bc3186fd3359994a046275c5cbbfec22e69eef8da7b546379cf1218e7fdfaf74" exitCode=0 Jan 23 09:04:08 crc kubenswrapper[4711]: I0123 09:04:08.836368 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vkcn9" event={"ID":"e844965e-09de-46d8-975d-79a12eb959c3","Type":"ContainerDied","Data":"bc3186fd3359994a046275c5cbbfec22e69eef8da7b546379cf1218e7fdfaf74"} Jan 23 09:04:08 crc kubenswrapper[4711]: I0123 09:04:08.836547 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vkcn9" event={"ID":"e844965e-09de-46d8-975d-79a12eb959c3","Type":"ContainerStarted","Data":"8ad76420828cb7b916d1931db43dfb1f82a6dc6b9e164ab8a1ec55bdb0e780f3"} Jan 23 09:04:09 crc kubenswrapper[4711]: I0123 09:04:09.484647 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="873746af-5cbc-4e4f-8a22-71fc0d57cded" path="/var/lib/kubelet/pods/873746af-5cbc-4e4f-8a22-71fc0d57cded/volumes" Jan 23 09:04:10 crc kubenswrapper[4711]: I0123 09:04:10.854622 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vkcn9" event={"ID":"e844965e-09de-46d8-975d-79a12eb959c3","Type":"ContainerStarted","Data":"4a663f712eff0fb6bf571e98a812b0a79246b8c7aee00e52e3ae7bb8442029ec"} Jan 23 09:04:11 crc kubenswrapper[4711]: I0123 09:04:11.865486 4711 generic.go:334] "Generic (PLEG): container finished" podID="e844965e-09de-46d8-975d-79a12eb959c3" containerID="4a663f712eff0fb6bf571e98a812b0a79246b8c7aee00e52e3ae7bb8442029ec" exitCode=0 Jan 23 09:04:11 crc kubenswrapper[4711]: I0123 09:04:11.865790 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vkcn9" event={"ID":"e844965e-09de-46d8-975d-79a12eb959c3","Type":"ContainerDied","Data":"4a663f712eff0fb6bf571e98a812b0a79246b8c7aee00e52e3ae7bb8442029ec"} Jan 23 09:04:13 crc kubenswrapper[4711]: I0123 09:04:13.476254 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:04:13 crc kubenswrapper[4711]: E0123 09:04:13.476497 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 09:04:20 crc kubenswrapper[4711]: I0123 09:04:20.934085 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vkcn9" event={"ID":"e844965e-09de-46d8-975d-79a12eb959c3","Type":"ContainerStarted","Data":"da55ed96098fdb2730ac9e09951fedb05f73201b16fd51eca5ac07fec4388ded"} Jan 23 09:04:21 crc kubenswrapper[4711]: I0123 09:04:21.963894 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vkcn9" podStartSLOduration=4.437987321 podStartE2EDuration="14.963877651s" podCreationTimestamp="2026-01-23 09:04:07 +0000 UTC" firstStartedPulling="2026-01-23 09:04:08.838522154 +0000 UTC m=+2634.411478522" lastFinishedPulling="2026-01-23 09:04:19.364412484 +0000 UTC m=+2644.937368852" observedRunningTime="2026-01-23 09:04:21.960755585 +0000 UTC m=+2647.533711953" watchObservedRunningTime="2026-01-23 09:04:21.963877651 +0000 UTC m=+2647.536834009" Jan 23 09:04:22 crc kubenswrapper[4711]: I0123 09:04:22.939642 4711 scope.go:117] "RemoveContainer" containerID="dedb0859c4dece08baa08290f8f1a1da1209d40672e72b589923b7e0061358c4" Jan 23 09:04:22 crc kubenswrapper[4711]: I0123 09:04:22.985954 4711 scope.go:117] "RemoveContainer" containerID="5b1aa2303b3b35c37b8d8d0638073afef75d1a12c293176098ebc8ee8d5b784b" Jan 23 09:04:23 crc kubenswrapper[4711]: I0123 09:04:23.036912 4711 scope.go:117] "RemoveContainer" containerID="45600017a11c1fc9751cd6e1c516c258c80f0ea63de60a23b4b6fff689a42150" Jan 23 09:04:27 crc kubenswrapper[4711]: I0123 09:04:27.473763 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:04:27 crc kubenswrapper[4711]: E0123 09:04:27.474568 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 09:04:27 crc kubenswrapper[4711]: I0123 09:04:27.698118 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vkcn9" Jan 23 09:04:27 crc kubenswrapper[4711]: I0123 09:04:27.698825 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vkcn9" Jan 23 09:04:27 crc kubenswrapper[4711]: I0123 09:04:27.740633 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vkcn9" Jan 23 09:04:28 crc kubenswrapper[4711]: I0123 09:04:28.028654 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vkcn9" Jan 23 09:04:28 crc kubenswrapper[4711]: I0123 09:04:28.076798 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vkcn9"] Jan 23 09:04:29 crc kubenswrapper[4711]: I0123 09:04:29.858225 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-jz2p2_65ea66aa-64dd-4c97-9d69-4984dac21b0f/kube-rbac-proxy/0.log" Jan 23 09:04:29 crc kubenswrapper[4711]: I0123 09:04:29.979900 4711 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_controller-6968d8fdc4-jz2p2_65ea66aa-64dd-4c97-9d69-4984dac21b0f/controller/0.log" Jan 23 09:04:29 crc kubenswrapper[4711]: I0123 09:04:29.999184 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vkcn9" podUID="e844965e-09de-46d8-975d-79a12eb959c3" containerName="registry-server" containerID="cri-o://da55ed96098fdb2730ac9e09951fedb05f73201b16fd51eca5ac07fec4388ded" gracePeriod=2 Jan 23 09:04:30 crc kubenswrapper[4711]: I0123 09:04:30.077018 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-26kvs_81ea7880-2a6c-4e0a-8489-063feb2f99b7/cp-frr-files/0.log" Jan 23 09:04:30 crc kubenswrapper[4711]: I0123 09:04:30.280437 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-26kvs_81ea7880-2a6c-4e0a-8489-063feb2f99b7/cp-frr-files/0.log" Jan 23 09:04:30 crc kubenswrapper[4711]: I0123 09:04:30.286713 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-26kvs_81ea7880-2a6c-4e0a-8489-063feb2f99b7/cp-reloader/0.log" Jan 23 09:04:30 crc kubenswrapper[4711]: I0123 09:04:30.324017 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-26kvs_81ea7880-2a6c-4e0a-8489-063feb2f99b7/cp-metrics/0.log" Jan 23 09:04:30 crc kubenswrapper[4711]: I0123 09:04:30.356028 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-26kvs_81ea7880-2a6c-4e0a-8489-063feb2f99b7/cp-reloader/0.log" Jan 23 09:04:30 crc kubenswrapper[4711]: I0123 09:04:30.523922 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-26kvs_81ea7880-2a6c-4e0a-8489-063feb2f99b7/cp-frr-files/0.log" Jan 23 09:04:30 crc kubenswrapper[4711]: I0123 09:04:30.529028 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-26kvs_81ea7880-2a6c-4e0a-8489-063feb2f99b7/cp-metrics/0.log" Jan 23 09:04:30 crc kubenswrapper[4711]: I0123 09:04:30.550026 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-26kvs_81ea7880-2a6c-4e0a-8489-063feb2f99b7/cp-reloader/0.log" Jan 23 09:04:30 crc kubenswrapper[4711]: I0123 09:04:30.572197 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-26kvs_81ea7880-2a6c-4e0a-8489-063feb2f99b7/cp-metrics/0.log" Jan 23 09:04:30 crc kubenswrapper[4711]: I0123 09:04:30.718584 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-26kvs_81ea7880-2a6c-4e0a-8489-063feb2f99b7/cp-reloader/0.log" Jan 23 09:04:30 crc kubenswrapper[4711]: I0123 09:04:30.725630 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-26kvs_81ea7880-2a6c-4e0a-8489-063feb2f99b7/cp-frr-files/0.log" Jan 23 09:04:30 crc kubenswrapper[4711]: I0123 09:04:30.788699 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-26kvs_81ea7880-2a6c-4e0a-8489-063feb2f99b7/cp-metrics/0.log" Jan 23 09:04:30 crc kubenswrapper[4711]: I0123 09:04:30.797608 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-26kvs_81ea7880-2a6c-4e0a-8489-063feb2f99b7/controller/0.log" Jan 23 09:04:30 crc kubenswrapper[4711]: I0123 09:04:30.920261 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-26kvs_81ea7880-2a6c-4e0a-8489-063feb2f99b7/frr-metrics/0.log" Jan 23 09:04:30 crc kubenswrapper[4711]: I0123 09:04:30.984806 4711 log.go:25] "Finished parsing log 
file" path="/var/log/pods/metallb-system_frr-k8s-26kvs_81ea7880-2a6c-4e0a-8489-063feb2f99b7/kube-rbac-proxy-frr/0.log" Jan 23 09:04:30 crc kubenswrapper[4711]: I0123 09:04:30.987480 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-26kvs_81ea7880-2a6c-4e0a-8489-063feb2f99b7/kube-rbac-proxy/0.log" Jan 23 09:04:31 crc kubenswrapper[4711]: I0123 09:04:31.136778 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-26kvs_81ea7880-2a6c-4e0a-8489-063feb2f99b7/reloader/0.log" Jan 23 09:04:31 crc kubenswrapper[4711]: I0123 09:04:31.174341 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-th8jp_cb216e80-98b3-46dc-b45e-86407aa0fc7b/frr-k8s-webhook-server/0.log" Jan 23 09:04:31 crc kubenswrapper[4711]: I0123 09:04:31.469132 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6548fb4d7d-g45jd_413bb353-7a3e-4b0f-a146-414e9aa93903/webhook-server/0.log" Jan 23 09:04:31 crc kubenswrapper[4711]: I0123 09:04:31.909960 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-565698bc67-8zpqd_11234ef0-6e4d-4b5c-9eeb-d1e37185edb1/manager/0.log" Jan 23 09:04:32 crc kubenswrapper[4711]: I0123 09:04:32.004241 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-kkqsn_f1cfbcc9-7d53-4ec2-806c-c6070123c2fe/kube-rbac-proxy/0.log" Jan 23 09:04:37 crc kubenswrapper[4711]: E0123 09:04:37.698628 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of da55ed96098fdb2730ac9e09951fedb05f73201b16fd51eca5ac07fec4388ded is running failed: container process not found" containerID="da55ed96098fdb2730ac9e09951fedb05f73201b16fd51eca5ac07fec4388ded" cmd=["grpc_health_probe","-addr=:50051"] Jan 23 09:04:37 crc kubenswrapper[4711]: E0123 09:04:37.699646 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of da55ed96098fdb2730ac9e09951fedb05f73201b16fd51eca5ac07fec4388ded is running failed: container process not found" containerID="da55ed96098fdb2730ac9e09951fedb05f73201b16fd51eca5ac07fec4388ded" cmd=["grpc_health_probe","-addr=:50051"] Jan 23 09:04:37 crc kubenswrapper[4711]: E0123 09:04:37.699898 4711 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of da55ed96098fdb2730ac9e09951fedb05f73201b16fd51eca5ac07fec4388ded is running failed: container process not found" containerID="da55ed96098fdb2730ac9e09951fedb05f73201b16fd51eca5ac07fec4388ded" cmd=["grpc_health_probe","-addr=:50051"] Jan 23 09:04:37 crc kubenswrapper[4711]: E0123 09:04:37.699930 4711 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of da55ed96098fdb2730ac9e09951fedb05f73201b16fd51eca5ac07fec4388ded is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-vkcn9" podUID="e844965e-09de-46d8-975d-79a12eb959c3" containerName="registry-server" Jan 23 09:04:41 crc kubenswrapper[4711]: I0123 09:04:41.473937 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:04:41 crc kubenswrapper[4711]: E0123 
Jan 23 09:04:41 crc kubenswrapper[4711]: I0123 09:04:41.473937 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7"
Jan 23 09:04:41 crc kubenswrapper[4711]: E0123 09:04:41.474403 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736"
Jan 23 09:04:43 crc kubenswrapper[4711]: I0123 09:04:43.836985 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-kkqsn_f1cfbcc9-7d53-4ec2-806c-c6070123c2fe/speaker/0.log"
Jan 23 09:04:45 crc kubenswrapper[4711]: I0123 09:04:45.131715 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-vkcn9_e844965e-09de-46d8-975d-79a12eb959c3/registry-server/0.log"
Jan 23 09:04:45 crc kubenswrapper[4711]: I0123 09:04:45.133631 4711 generic.go:334] "Generic (PLEG): container finished" podID="e844965e-09de-46d8-975d-79a12eb959c3" containerID="da55ed96098fdb2730ac9e09951fedb05f73201b16fd51eca5ac07fec4388ded" exitCode=-1
Jan 23 09:04:45 crc kubenswrapper[4711]: I0123 09:04:45.133694 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vkcn9" event={"ID":"e844965e-09de-46d8-975d-79a12eb959c3","Type":"ContainerDied","Data":"da55ed96098fdb2730ac9e09951fedb05f73201b16fd51eca5ac07fec4388ded"}
Jan 23 09:04:45 crc kubenswrapper[4711]: I0123 09:04:45.573933 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vkcn9"
Jan 23 09:04:45 crc kubenswrapper[4711]: I0123 09:04:45.638445 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e844965e-09de-46d8-975d-79a12eb959c3-catalog-content\") pod \"e844965e-09de-46d8-975d-79a12eb959c3\" (UID: \"e844965e-09de-46d8-975d-79a12eb959c3\") "
Jan 23 09:04:45 crc kubenswrapper[4711]: I0123 09:04:45.638609 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlv75\" (UniqueName: \"kubernetes.io/projected/e844965e-09de-46d8-975d-79a12eb959c3-kube-api-access-rlv75\") pod \"e844965e-09de-46d8-975d-79a12eb959c3\" (UID: \"e844965e-09de-46d8-975d-79a12eb959c3\") "
Jan 23 09:04:45 crc kubenswrapper[4711]: I0123 09:04:45.638643 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e844965e-09de-46d8-975d-79a12eb959c3-utilities\") pod \"e844965e-09de-46d8-975d-79a12eb959c3\" (UID: \"e844965e-09de-46d8-975d-79a12eb959c3\") "
Jan 23 09:04:45 crc kubenswrapper[4711]: I0123 09:04:45.640058 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e844965e-09de-46d8-975d-79a12eb959c3-utilities" (OuterVolumeSpecName: "utilities") pod "e844965e-09de-46d8-975d-79a12eb959c3" (UID: "e844965e-09de-46d8-975d-79a12eb959c3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 23 09:04:45 crc kubenswrapper[4711]: I0123 09:04:45.647006 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e844965e-09de-46d8-975d-79a12eb959c3-kube-api-access-rlv75" (OuterVolumeSpecName: "kube-api-access-rlv75") pod "e844965e-09de-46d8-975d-79a12eb959c3" (UID: "e844965e-09de-46d8-975d-79a12eb959c3"). InnerVolumeSpecName "kube-api-access-rlv75". PluginName "kubernetes.io/projected", VolumeGidValue ""
InnerVolumeSpecName "kube-api-access-rlv75". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 09:04:45 crc kubenswrapper[4711]: I0123 09:04:45.732650 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-26kvs_81ea7880-2a6c-4e0a-8489-063feb2f99b7/frr/0.log" Jan 23 09:04:45 crc kubenswrapper[4711]: I0123 09:04:45.741530 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlv75\" (UniqueName: \"kubernetes.io/projected/e844965e-09de-46d8-975d-79a12eb959c3-kube-api-access-rlv75\") on node \"crc\" DevicePath \"\"" Jan 23 09:04:45 crc kubenswrapper[4711]: I0123 09:04:45.741697 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e844965e-09de-46d8-975d-79a12eb959c3-utilities\") on node \"crc\" DevicePath \"\"" Jan 23 09:04:45 crc kubenswrapper[4711]: I0123 09:04:45.774795 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e844965e-09de-46d8-975d-79a12eb959c3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e844965e-09de-46d8-975d-79a12eb959c3" (UID: "e844965e-09de-46d8-975d-79a12eb959c3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 09:04:45 crc kubenswrapper[4711]: I0123 09:04:45.843885 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e844965e-09de-46d8-975d-79a12eb959c3-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 23 09:04:46 crc kubenswrapper[4711]: I0123 09:04:46.142433 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vkcn9" event={"ID":"e844965e-09de-46d8-975d-79a12eb959c3","Type":"ContainerDied","Data":"8ad76420828cb7b916d1931db43dfb1f82a6dc6b9e164ab8a1ec55bdb0e780f3"} Jan 23 09:04:46 crc kubenswrapper[4711]: I0123 09:04:46.142456 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vkcn9" Jan 23 09:04:46 crc kubenswrapper[4711]: I0123 09:04:46.142541 4711 scope.go:117] "RemoveContainer" containerID="da55ed96098fdb2730ac9e09951fedb05f73201b16fd51eca5ac07fec4388ded" Jan 23 09:04:46 crc kubenswrapper[4711]: I0123 09:04:46.168902 4711 scope.go:117] "RemoveContainer" containerID="4a663f712eff0fb6bf571e98a812b0a79246b8c7aee00e52e3ae7bb8442029ec" Jan 23 09:04:46 crc kubenswrapper[4711]: I0123 09:04:46.181445 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vkcn9"] Jan 23 09:04:46 crc kubenswrapper[4711]: I0123 09:04:46.189220 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vkcn9"] Jan 23 09:04:46 crc kubenswrapper[4711]: I0123 09:04:46.196193 4711 scope.go:117] "RemoveContainer" containerID="bc3186fd3359994a046275c5cbbfec22e69eef8da7b546379cf1218e7fdfaf74" Jan 23 09:04:47 crc kubenswrapper[4711]: I0123 09:04:47.485599 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e844965e-09de-46d8-975d-79a12eb959c3" path="/var/lib/kubelet/pods/e844965e-09de-46d8-975d-79a12eb959c3/volumes" Jan 23 09:04:53 crc kubenswrapper[4711]: I0123 09:04:53.474643 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:04:53 crc kubenswrapper[4711]: E0123 09:04:53.475386 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 09:04:55 crc kubenswrapper[4711]: I0123 09:04:55.842244 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-784948c4bd-c7q8r_b8cff513-6ae8-4f64-b9e5-7dfa6cd9f6d7/keystone-api/0.log" Jan 23 09:04:56 crc kubenswrapper[4711]: I0123 09:04:56.049737 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-cron-29485981-v6rjc_fd607e4c-3a6a-4236-aa1f-7bae23e4db94/keystone-cron/0.log" Jan 23 09:04:56 crc kubenswrapper[4711]: I0123 09:04:56.257576 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-api-0_f70146fe-2308-422b-9efa-42b334f7675f/nova-kuttl-api-api/0.log" Jan 23 09:04:56 crc kubenswrapper[4711]: I0123 09:04:56.352481 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-api-0_f70146fe-2308-422b-9efa-42b334f7675f/nova-kuttl-api-log/0.log" Jan 23 09:04:56 crc kubenswrapper[4711]: I0123 09:04:56.632145 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-cell0-conductor-0_944ce012-35b1-4e7a-a6ac-8c89d9b8cd1a/nova-kuttl-cell0-conductor-conductor/0.log" Jan 23 09:04:56 crc kubenswrapper[4711]: I0123 09:04:56.849682 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-cell1-conductor-0_704a67ad-6f29-43f2-b01f-be325aa8cb91/nova-kuttl-cell1-conductor-conductor/0.log" Jan 23 09:04:56 crc kubenswrapper[4711]: I0123 09:04:56.921332 4711 log.go:25] "Finished parsing log file" 
path="/var/log/pods/nova-kuttl-default_nova-kuttl-cell1-novncproxy-0_4b9c7342-6111-4e46-8bc7-6edcddd570af/nova-kuttl-cell1-novncproxy-novncproxy/0.log" Jan 23 09:04:57 crc kubenswrapper[4711]: I0123 09:04:57.343789 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-metadata-0_667ff795-4e58-403c-9f54-bd5c2ace5456/nova-kuttl-metadata-metadata/0.log" Jan 23 09:04:57 crc kubenswrapper[4711]: I0123 09:04:57.426197 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-metadata-0_667ff795-4e58-403c-9f54-bd5c2ace5456/nova-kuttl-metadata-log/0.log" Jan 23 09:04:57 crc kubenswrapper[4711]: I0123 09:04:57.572129 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_nova-kuttl-scheduler-0_44ba32b3-a02b-4ab5-a00c-90fb25eea139/nova-kuttl-scheduler-scheduler/0.log" Jan 23 09:04:58 crc kubenswrapper[4711]: I0123 09:04:58.123950 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-cell1-galera-0_13ccba34-03d8-4429-bace-b75cb5d12763/galera/0.log" Jan 23 09:04:58 crc kubenswrapper[4711]: I0123 09:04:58.459791 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-cell1-galera-0_13ccba34-03d8-4429-bace-b75cb5d12763/mysql-bootstrap/0.log" Jan 23 09:04:58 crc kubenswrapper[4711]: I0123 09:04:58.460424 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-cell1-galera-0_13ccba34-03d8-4429-bace-b75cb5d12763/mysql-bootstrap/0.log" Jan 23 09:04:58 crc kubenswrapper[4711]: I0123 09:04:58.590070 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-galera-0_a21f5317-eee2-4f13-9df5-40c48bce5aaf/mysql-bootstrap/0.log" Jan 23 09:04:58 crc kubenswrapper[4711]: I0123 09:04:58.636751 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_memcached-0_e899df0c-3fb3-4d7b-b376-0a907dbc82a0/memcached/0.log" Jan 23 09:04:58 crc kubenswrapper[4711]: I0123 09:04:58.855750 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-galera-0_a21f5317-eee2-4f13-9df5-40c48bce5aaf/galera/0.log" Jan 23 09:04:58 crc kubenswrapper[4711]: I0123 09:04:58.870342 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-galera-0_a21f5317-eee2-4f13-9df5-40c48bce5aaf/mysql-bootstrap/0.log" Jan 23 09:04:58 crc kubenswrapper[4711]: I0123 09:04:58.891740 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstackclient_3ac7d3e6-9992-4a83-bbff-8c99ef784b20/openstackclient/0.log" Jan 23 09:04:59 crc kubenswrapper[4711]: I0123 09:04:59.079147 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_placement-559c845968-gb6qv_eb857478-48a3-4ed9-8a19-47386937c4d7/placement-api/0.log" Jan 23 09:04:59 crc kubenswrapper[4711]: I0123 09:04:59.163074 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_placement-559c845968-gb6qv_eb857478-48a3-4ed9-8a19-47386937c4d7/placement-log/0.log" Jan 23 09:04:59 crc kubenswrapper[4711]: I0123 09:04:59.267984 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-broadcaster-server-0_b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf/setup-container/0.log" Jan 23 09:04:59 crc kubenswrapper[4711]: I0123 09:04:59.486828 4711 log.go:25] "Finished parsing log file" 
path="/var/log/pods/nova-kuttl-default_rabbitmq-broadcaster-server-0_b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf/setup-container/0.log" Jan 23 09:04:59 crc kubenswrapper[4711]: I0123 09:04:59.545091 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-broadcaster-server-0_b43f9b5a-a1d3-4cc6-8caf-20bcd74910bf/rabbitmq/0.log" Jan 23 09:04:59 crc kubenswrapper[4711]: I0123 09:04:59.564842 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-cell1-server-0_00e53f46-c48c-4f2c-83aa-088781b82d46/setup-container/0.log" Jan 23 09:04:59 crc kubenswrapper[4711]: I0123 09:04:59.749874 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-cell1-server-0_00e53f46-c48c-4f2c-83aa-088781b82d46/setup-container/0.log" Jan 23 09:04:59 crc kubenswrapper[4711]: I0123 09:04:59.756290 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-cell1-server-0_00e53f46-c48c-4f2c-83aa-088781b82d46/rabbitmq/0.log" Jan 23 09:04:59 crc kubenswrapper[4711]: I0123 09:04:59.803200 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-server-0_f970c1db-48d5-4b49-afc1-eee7e1289da9/setup-container/0.log" Jan 23 09:04:59 crc kubenswrapper[4711]: I0123 09:04:59.938288 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-server-0_f970c1db-48d5-4b49-afc1-eee7e1289da9/setup-container/0.log" Jan 23 09:04:59 crc kubenswrapper[4711]: I0123 09:04:59.970769 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-server-0_f970c1db-48d5-4b49-afc1-eee7e1289da9/rabbitmq/0.log" Jan 23 09:05:08 crc kubenswrapper[4711]: I0123 09:05:08.473813 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:05:08 crc kubenswrapper[4711]: E0123 09:05:08.474608 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" Jan 23 09:05:13 crc kubenswrapper[4711]: I0123 09:05:13.967956 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr_dc8bec03-5a55-4232-a3de-6650e4c7a7da/util/0.log" Jan 23 09:05:14 crc kubenswrapper[4711]: I0123 09:05:14.172853 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr_dc8bec03-5a55-4232-a3de-6650e4c7a7da/util/0.log" Jan 23 09:05:14 crc kubenswrapper[4711]: I0123 09:05:14.701294 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr_dc8bec03-5a55-4232-a3de-6650e4c7a7da/pull/0.log" Jan 23 09:05:14 crc kubenswrapper[4711]: I0123 09:05:14.701360 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr_dc8bec03-5a55-4232-a3de-6650e4c7a7da/pull/0.log" Jan 23 09:05:14 crc kubenswrapper[4711]: I0123 09:05:14.702956 4711 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr_dc8bec03-5a55-4232-a3de-6650e4c7a7da/pull/0.log" Jan 23 09:05:14 crc kubenswrapper[4711]: I0123 09:05:14.863164 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr_dc8bec03-5a55-4232-a3de-6650e4c7a7da/util/0.log" Jan 23 09:05:14 crc kubenswrapper[4711]: I0123 09:05:14.878711 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj_0e77f40d-e837-46f6-9a56-df9a7c911bfb/util/0.log" Jan 23 09:05:14 crc kubenswrapper[4711]: I0123 09:05:14.920469 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzssr_dc8bec03-5a55-4232-a3de-6650e4c7a7da/extract/0.log" Jan 23 09:05:15 crc kubenswrapper[4711]: I0123 09:05:15.108320 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj_0e77f40d-e837-46f6-9a56-df9a7c911bfb/util/0.log" Jan 23 09:05:15 crc kubenswrapper[4711]: I0123 09:05:15.149355 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj_0e77f40d-e837-46f6-9a56-df9a7c911bfb/pull/0.log" Jan 23 09:05:15 crc kubenswrapper[4711]: I0123 09:05:15.150420 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj_0e77f40d-e837-46f6-9a56-df9a7c911bfb/pull/0.log" Jan 23 09:05:15 crc kubenswrapper[4711]: I0123 09:05:15.311818 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj_0e77f40d-e837-46f6-9a56-df9a7c911bfb/util/0.log" Jan 23 09:05:15 crc kubenswrapper[4711]: I0123 09:05:15.317529 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj_0e77f40d-e837-46f6-9a56-df9a7c911bfb/pull/0.log" Jan 23 09:05:15 crc kubenswrapper[4711]: I0123 09:05:15.318922 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcthkrj_0e77f40d-e837-46f6-9a56-df9a7c911bfb/extract/0.log" Jan 23 09:05:15 crc kubenswrapper[4711]: I0123 09:05:15.513021 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz_8362e2cd-76b3-44f8-8d22-b8542c471584/util/0.log" Jan 23 09:05:15 crc kubenswrapper[4711]: I0123 09:05:15.730180 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz_8362e2cd-76b3-44f8-8d22-b8542c471584/pull/0.log" Jan 23 09:05:15 crc kubenswrapper[4711]: I0123 09:05:15.756531 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz_8362e2cd-76b3-44f8-8d22-b8542c471584/util/0.log" Jan 23 09:05:15 crc kubenswrapper[4711]: I0123 09:05:15.759870 4711 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz_8362e2cd-76b3-44f8-8d22-b8542c471584/pull/0.log" Jan 23 09:05:15 crc kubenswrapper[4711]: I0123 09:05:15.934936 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz_8362e2cd-76b3-44f8-8d22-b8542c471584/util/0.log" Jan 23 09:05:15 crc kubenswrapper[4711]: I0123 09:05:15.971018 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz_8362e2cd-76b3-44f8-8d22-b8542c471584/pull/0.log" Jan 23 09:05:16 crc kubenswrapper[4711]: I0123 09:05:16.008708 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec7138xdsz_8362e2cd-76b3-44f8-8d22-b8542c471584/extract/0.log" Jan 23 09:05:16 crc kubenswrapper[4711]: I0123 09:05:16.137012 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zjxjv_9bbf14bc-e2e2-4708-882d-e3234f82409b/extract-utilities/0.log" Jan 23 09:05:16 crc kubenswrapper[4711]: I0123 09:05:16.308200 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zjxjv_9bbf14bc-e2e2-4708-882d-e3234f82409b/extract-utilities/0.log" Jan 23 09:05:16 crc kubenswrapper[4711]: I0123 09:05:16.351690 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zjxjv_9bbf14bc-e2e2-4708-882d-e3234f82409b/extract-content/0.log" Jan 23 09:05:16 crc kubenswrapper[4711]: I0123 09:05:16.373440 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zjxjv_9bbf14bc-e2e2-4708-882d-e3234f82409b/extract-content/0.log" Jan 23 09:05:16 crc kubenswrapper[4711]: I0123 09:05:16.583304 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zjxjv_9bbf14bc-e2e2-4708-882d-e3234f82409b/extract-content/0.log" Jan 23 09:05:16 crc kubenswrapper[4711]: I0123 09:05:16.599186 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zjxjv_9bbf14bc-e2e2-4708-882d-e3234f82409b/extract-utilities/0.log" Jan 23 09:05:16 crc kubenswrapper[4711]: I0123 09:05:16.846818 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dq742_1fa1164d-0e54-42bd-9fe1-88f3a02148b0/extract-utilities/0.log" Jan 23 09:05:17 crc kubenswrapper[4711]: I0123 09:05:17.072789 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dq742_1fa1164d-0e54-42bd-9fe1-88f3a02148b0/extract-utilities/0.log" Jan 23 09:05:17 crc kubenswrapper[4711]: I0123 09:05:17.084580 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dq742_1fa1164d-0e54-42bd-9fe1-88f3a02148b0/extract-content/0.log" Jan 23 09:05:17 crc kubenswrapper[4711]: I0123 09:05:17.125370 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dq742_1fa1164d-0e54-42bd-9fe1-88f3a02148b0/extract-content/0.log" Jan 23 09:05:17 crc kubenswrapper[4711]: I0123 09:05:17.225692 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zjxjv_9bbf14bc-e2e2-4708-882d-e3234f82409b/registry-server/0.log" Jan 23 09:05:17 
Jan 23 09:05:17 crc kubenswrapper[4711]: I0123 09:05:17.385344 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dq742_1fa1164d-0e54-42bd-9fe1-88f3a02148b0/extract-content/0.log"
Jan 23 09:05:17 crc kubenswrapper[4711]: I0123 09:05:17.426189 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dq742_1fa1164d-0e54-42bd-9fe1-88f3a02148b0/extract-utilities/0.log"
Jan 23 09:05:17 crc kubenswrapper[4711]: I0123 09:05:17.664412 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-hzv6z_a9f6b0c2-4bc3-4bf5-987e-93b1f6ea0520/marketplace-operator/0.log"
Jan 23 09:05:17 crc kubenswrapper[4711]: I0123 09:05:17.729619 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nx6zm_f82925db-b9b1-4c44-8e2f-467607bd171c/extract-utilities/0.log"
Jan 23 09:05:17 crc kubenswrapper[4711]: I0123 09:05:17.997274 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nx6zm_f82925db-b9b1-4c44-8e2f-467607bd171c/extract-content/0.log"
Jan 23 09:05:18 crc kubenswrapper[4711]: I0123 09:05:18.048436 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nx6zm_f82925db-b9b1-4c44-8e2f-467607bd171c/extract-utilities/0.log"
Jan 23 09:05:18 crc kubenswrapper[4711]: I0123 09:05:18.071314 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nx6zm_f82925db-b9b1-4c44-8e2f-467607bd171c/extract-content/0.log"
Jan 23 09:05:18 crc kubenswrapper[4711]: I0123 09:05:18.092186 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dq742_1fa1164d-0e54-42bd-9fe1-88f3a02148b0/registry-server/0.log"
Jan 23 09:05:18 crc kubenswrapper[4711]: I0123 09:05:18.207204 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nx6zm_f82925db-b9b1-4c44-8e2f-467607bd171c/extract-utilities/0.log"
Jan 23 09:05:18 crc kubenswrapper[4711]: I0123 09:05:18.254295 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nx6zm_f82925db-b9b1-4c44-8e2f-467607bd171c/extract-content/0.log"
Jan 23 09:05:18 crc kubenswrapper[4711]: I0123 09:05:18.366092 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7t5gv_5fc0d972-014e-4c07-b699-372362c53774/extract-utilities/0.log"
Jan 23 09:05:18 crc kubenswrapper[4711]: I0123 09:05:18.623115 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nx6zm_f82925db-b9b1-4c44-8e2f-467607bd171c/registry-server/0.log"
Jan 23 09:05:18 crc kubenswrapper[4711]: I0123 09:05:18.788725 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7t5gv_5fc0d972-014e-4c07-b699-372362c53774/extract-content/0.log"
Jan 23 09:05:18 crc kubenswrapper[4711]: I0123 09:05:18.788767 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7t5gv_5fc0d972-014e-4c07-b699-372362c53774/extract-utilities/0.log"
Jan 23 09:05:18 crc kubenswrapper[4711]: I0123 09:05:18.810756 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7t5gv_5fc0d972-014e-4c07-b699-372362c53774/extract-content/0.log"
Jan 23 09:05:18 crc kubenswrapper[4711]: I0123 09:05:18.974185 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7t5gv_5fc0d972-014e-4c07-b699-372362c53774/extract-utilities/0.log"
Jan 23 09:05:18 crc kubenswrapper[4711]: I0123 09:05:18.996216 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7t5gv_5fc0d972-014e-4c07-b699-372362c53774/extract-content/0.log"
Jan 23 09:05:19 crc kubenswrapper[4711]: I0123 09:05:19.466342 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7t5gv_5fc0d972-014e-4c07-b699-372362c53774/registry-server/0.log"
Jan 23 09:05:19 crc kubenswrapper[4711]: I0123 09:05:19.474899 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7"
Jan 23 09:05:19 crc kubenswrapper[4711]: E0123 09:05:19.475138 4711 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2t9r8_openshift-machine-config-operator(3846d4e0-cfda-4e0b-8747-85267de12736)\"" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736"
Jan 23 09:05:33 crc kubenswrapper[4711]: I0123 09:05:33.475295 4711 scope.go:117] "RemoveContainer" containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7"
Jan 23 09:05:35 crc kubenswrapper[4711]: I0123 09:05:35.517744 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerStarted","Data":"9bb5bbd648af9315235be45b215ca2d23345eb516119337679151e09a532a4b5"}
Jan 23 09:05:48 crc kubenswrapper[4711]: I0123 09:05:48.102189 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9nlqh"]
Jan 23 09:05:48 crc kubenswrapper[4711]: E0123 09:05:48.103407 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e844965e-09de-46d8-975d-79a12eb959c3" containerName="extract-content"
Jan 23 09:05:48 crc kubenswrapper[4711]: I0123 09:05:48.103426 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e844965e-09de-46d8-975d-79a12eb959c3" containerName="extract-content"
Jan 23 09:05:48 crc kubenswrapper[4711]: E0123 09:05:48.103452 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e844965e-09de-46d8-975d-79a12eb959c3" containerName="registry-server"
Jan 23 09:05:48 crc kubenswrapper[4711]: I0123 09:05:48.103460 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e844965e-09de-46d8-975d-79a12eb959c3" containerName="registry-server"
Jan 23 09:05:48 crc kubenswrapper[4711]: E0123 09:05:48.103482 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e844965e-09de-46d8-975d-79a12eb959c3" containerName="extract-utilities"
Jan 23 09:05:48 crc kubenswrapper[4711]: I0123 09:05:48.103493 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="e844965e-09de-46d8-975d-79a12eb959c3" containerName="extract-utilities"
Jan 23 09:05:48 crc kubenswrapper[4711]: I0123 09:05:48.103728 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="e844965e-09de-46d8-975d-79a12eb959c3" containerName="registry-server"
Jan 23 09:05:48 crc kubenswrapper[4711]: I0123 09:05:48.105059 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9nlqh"
Jan 23 09:05:48 crc kubenswrapper[4711]: I0123 09:05:48.118691 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9nlqh"]
Jan 23 09:05:48 crc kubenswrapper[4711]: I0123 09:05:48.171801 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b422ba8f-7f4d-4138-9260-5ca0118636a9-catalog-content\") pod \"redhat-marketplace-9nlqh\" (UID: \"b422ba8f-7f4d-4138-9260-5ca0118636a9\") " pod="openshift-marketplace/redhat-marketplace-9nlqh"
Jan 23 09:05:48 crc kubenswrapper[4711]: I0123 09:05:48.171941 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b422ba8f-7f4d-4138-9260-5ca0118636a9-utilities\") pod \"redhat-marketplace-9nlqh\" (UID: \"b422ba8f-7f4d-4138-9260-5ca0118636a9\") " pod="openshift-marketplace/redhat-marketplace-9nlqh"
Jan 23 09:05:48 crc kubenswrapper[4711]: I0123 09:05:48.171981 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxwh7\" (UniqueName: \"kubernetes.io/projected/b422ba8f-7f4d-4138-9260-5ca0118636a9-kube-api-access-nxwh7\") pod \"redhat-marketplace-9nlqh\" (UID: \"b422ba8f-7f4d-4138-9260-5ca0118636a9\") " pod="openshift-marketplace/redhat-marketplace-9nlqh"
Jan 23 09:05:48 crc kubenswrapper[4711]: I0123 09:05:48.273698 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b422ba8f-7f4d-4138-9260-5ca0118636a9-utilities\") pod \"redhat-marketplace-9nlqh\" (UID: \"b422ba8f-7f4d-4138-9260-5ca0118636a9\") " pod="openshift-marketplace/redhat-marketplace-9nlqh"
Jan 23 09:05:48 crc kubenswrapper[4711]: I0123 09:05:48.273762 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxwh7\" (UniqueName: \"kubernetes.io/projected/b422ba8f-7f4d-4138-9260-5ca0118636a9-kube-api-access-nxwh7\") pod \"redhat-marketplace-9nlqh\" (UID: \"b422ba8f-7f4d-4138-9260-5ca0118636a9\") " pod="openshift-marketplace/redhat-marketplace-9nlqh"
Jan 23 09:05:48 crc kubenswrapper[4711]: I0123 09:05:48.273866 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b422ba8f-7f4d-4138-9260-5ca0118636a9-catalog-content\") pod \"redhat-marketplace-9nlqh\" (UID: \"b422ba8f-7f4d-4138-9260-5ca0118636a9\") " pod="openshift-marketplace/redhat-marketplace-9nlqh"
Jan 23 09:05:48 crc kubenswrapper[4711]: I0123 09:05:48.274444 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b422ba8f-7f4d-4138-9260-5ca0118636a9-catalog-content\") pod \"redhat-marketplace-9nlqh\" (UID: \"b422ba8f-7f4d-4138-9260-5ca0118636a9\") " pod="openshift-marketplace/redhat-marketplace-9nlqh"
Jan 23 09:05:48 crc kubenswrapper[4711]: I0123 09:05:48.274740 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b422ba8f-7f4d-4138-9260-5ca0118636a9-utilities\") pod \"redhat-marketplace-9nlqh\" (UID: \"b422ba8f-7f4d-4138-9260-5ca0118636a9\") " pod="openshift-marketplace/redhat-marketplace-9nlqh"
Jan 23 09:05:48 crc kubenswrapper[4711]: I0123 09:05:48.295466 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxwh7\" (UniqueName: \"kubernetes.io/projected/b422ba8f-7f4d-4138-9260-5ca0118636a9-kube-api-access-nxwh7\") pod \"redhat-marketplace-9nlqh\" (UID: \"b422ba8f-7f4d-4138-9260-5ca0118636a9\") " pod="openshift-marketplace/redhat-marketplace-9nlqh"
Jan 23 09:05:48 crc kubenswrapper[4711]: I0123 09:05:48.434266 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9nlqh"
Jan 23 09:05:48 crc kubenswrapper[4711]: I0123 09:05:48.978247 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9nlqh"]
Jan 23 09:05:48 crc kubenswrapper[4711]: W0123 09:05:48.992021 4711 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb422ba8f_7f4d_4138_9260_5ca0118636a9.slice/crio-9603c6d306d300a989715101a2e731149510cb344efc19a2fed7da35d4d732d6 WatchSource:0}: Error finding container 9603c6d306d300a989715101a2e731149510cb344efc19a2fed7da35d4d732d6: Status 404 returned error can't find the container with id 9603c6d306d300a989715101a2e731149510cb344efc19a2fed7da35d4d732d6
Jan 23 09:05:49 crc kubenswrapper[4711]: I0123 09:05:49.666619 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nlqh" event={"ID":"b422ba8f-7f4d-4138-9260-5ca0118636a9","Type":"ContainerStarted","Data":"9603c6d306d300a989715101a2e731149510cb344efc19a2fed7da35d4d732d6"}
Jan 23 09:05:53 crc kubenswrapper[4711]: I0123 09:05:53.695743 4711 generic.go:334] "Generic (PLEG): container finished" podID="b422ba8f-7f4d-4138-9260-5ca0118636a9" containerID="fbb4fc8562260883671be3ed8467de6d0d6193249100efe20057e3f5a905e73d" exitCode=0
Jan 23 09:05:53 crc kubenswrapper[4711]: I0123 09:05:53.695836 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nlqh" event={"ID":"b422ba8f-7f4d-4138-9260-5ca0118636a9","Type":"ContainerDied","Data":"fbb4fc8562260883671be3ed8467de6d0d6193249100efe20057e3f5a905e73d"}
Jan 23 09:06:04 crc kubenswrapper[4711]: I0123 09:06:04.851983 4711 generic.go:334] "Generic (PLEG): container finished" podID="b422ba8f-7f4d-4138-9260-5ca0118636a9" containerID="21e65d9aab3cfd94671c91a99ffb231f28d57315c9d55c31fc40df442cbd4c85" exitCode=0
Jan 23 09:06:04 crc kubenswrapper[4711]: I0123 09:06:04.852570 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nlqh" event={"ID":"b422ba8f-7f4d-4138-9260-5ca0118636a9","Type":"ContainerDied","Data":"21e65d9aab3cfd94671c91a99ffb231f28d57315c9d55c31fc40df442cbd4c85"}
Jan 23 09:06:11 crc kubenswrapper[4711]: I0123 09:06:11.919473 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nlqh" event={"ID":"b422ba8f-7f4d-4138-9260-5ca0118636a9","Type":"ContainerStarted","Data":"e9bdf38481b9dcee03cbdac99f0a13893e559b8463d97cb2611243900f800e6c"}
Jan 23 09:06:12 crc kubenswrapper[4711]: I0123 09:06:12.951430 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9nlqh" podStartSLOduration=11.23185138 podStartE2EDuration="24.951412619s" podCreationTimestamp="2026-01-23 09:05:48 +0000 UTC" firstStartedPulling="2026-01-23 09:05:55.735543849 +0000 UTC m=+2741.308500217" lastFinishedPulling="2026-01-23 09:06:09.455105078 +0000 UTC m=+2755.028061456" observedRunningTime="2026-01-23 09:06:12.947780639 +0000 UTC m=+2758.520737017" watchObservedRunningTime="2026-01-23 09:06:12.951412619 +0000 UTC m=+2758.524368987"
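
[Annotation] The startup-duration entry above can be checked by hand: podStartE2EDuration is the observed-running time minus podCreationTimestamp (09:06:12.951412619 - 09:05:48 = 24.951412619s), and podStartSLOduration is that figure minus the image-pull window between firstStartedPulling and lastFinishedPulling (09:06:09.455105078 - 09:05:55.735543849 = 13.719561229s), giving roughly 11.2319s, which matches the logged 11.23185138 up to clock rounding. A worked check:

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
    	parse := func(s string) time.Time {
    		t, err := time.Parse(layout, s)
    		if err != nil {
    			panic(err)
    		}
    		return t
    	}
    	created := parse("2026-01-23 09:05:48 +0000 UTC")
    	running := parse("2026-01-23 09:06:12.951412619 +0000 UTC")
    	pullStart := parse("2026-01-23 09:05:55.735543849 +0000 UTC")
    	pullEnd := parse("2026-01-23 09:06:09.455105078 +0000 UTC")

    	e2e := running.Sub(created)         // podStartE2EDuration: ~24.95s
    	slo := e2e - pullEnd.Sub(pullStart) // exclude image pull: ~11.23s
    	fmt.Println(e2e, slo)
    }

In other words, more than half of this pod's 25-second start was spent pulling the image; the SLO figure deliberately excludes that.
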
watchObservedRunningTime="2026-01-23 09:06:12.951412619 +0000 UTC m=+2758.524368987" Jan 23 09:06:18 crc kubenswrapper[4711]: I0123 09:06:18.434686 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9nlqh" Jan 23 09:06:18 crc kubenswrapper[4711]: I0123 09:06:18.435367 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9nlqh" Jan 23 09:06:18 crc kubenswrapper[4711]: I0123 09:06:18.478782 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9nlqh" Jan 23 09:06:19 crc kubenswrapper[4711]: I0123 09:06:19.035677 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9nlqh" Jan 23 09:06:19 crc kubenswrapper[4711]: I0123 09:06:19.318611 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9nlqh"] Jan 23 09:06:20 crc kubenswrapper[4711]: I0123 09:06:20.993748 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9nlqh" podUID="b422ba8f-7f4d-4138-9260-5ca0118636a9" containerName="registry-server" containerID="cri-o://e9bdf38481b9dcee03cbdac99f0a13893e559b8463d97cb2611243900f800e6c" gracePeriod=2 Jan 23 09:06:22 crc kubenswrapper[4711]: I0123 09:06:22.003019 4711 generic.go:334] "Generic (PLEG): container finished" podID="b422ba8f-7f4d-4138-9260-5ca0118636a9" containerID="e9bdf38481b9dcee03cbdac99f0a13893e559b8463d97cb2611243900f800e6c" exitCode=0 Jan 23 09:06:22 crc kubenswrapper[4711]: I0123 09:06:22.003070 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nlqh" event={"ID":"b422ba8f-7f4d-4138-9260-5ca0118636a9","Type":"ContainerDied","Data":"e9bdf38481b9dcee03cbdac99f0a13893e559b8463d97cb2611243900f800e6c"} Jan 23 09:06:22 crc kubenswrapper[4711]: I0123 09:06:22.807567 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9nlqh" Jan 23 09:06:22 crc kubenswrapper[4711]: I0123 09:06:22.967408 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b422ba8f-7f4d-4138-9260-5ca0118636a9-utilities\") pod \"b422ba8f-7f4d-4138-9260-5ca0118636a9\" (UID: \"b422ba8f-7f4d-4138-9260-5ca0118636a9\") " Jan 23 09:06:22 crc kubenswrapper[4711]: I0123 09:06:22.967901 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b422ba8f-7f4d-4138-9260-5ca0118636a9-catalog-content\") pod \"b422ba8f-7f4d-4138-9260-5ca0118636a9\" (UID: \"b422ba8f-7f4d-4138-9260-5ca0118636a9\") " Jan 23 09:06:22 crc kubenswrapper[4711]: I0123 09:06:22.968052 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nxwh7\" (UniqueName: \"kubernetes.io/projected/b422ba8f-7f4d-4138-9260-5ca0118636a9-kube-api-access-nxwh7\") pod \"b422ba8f-7f4d-4138-9260-5ca0118636a9\" (UID: \"b422ba8f-7f4d-4138-9260-5ca0118636a9\") " Jan 23 09:06:22 crc kubenswrapper[4711]: I0123 09:06:22.968596 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b422ba8f-7f4d-4138-9260-5ca0118636a9-utilities" (OuterVolumeSpecName: "utilities") pod "b422ba8f-7f4d-4138-9260-5ca0118636a9" (UID: "b422ba8f-7f4d-4138-9260-5ca0118636a9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 09:06:22 crc kubenswrapper[4711]: I0123 09:06:22.986745 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b422ba8f-7f4d-4138-9260-5ca0118636a9-kube-api-access-nxwh7" (OuterVolumeSpecName: "kube-api-access-nxwh7") pod "b422ba8f-7f4d-4138-9260-5ca0118636a9" (UID: "b422ba8f-7f4d-4138-9260-5ca0118636a9"). InnerVolumeSpecName "kube-api-access-nxwh7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 09:06:22 crc kubenswrapper[4711]: I0123 09:06:22.994821 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b422ba8f-7f4d-4138-9260-5ca0118636a9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b422ba8f-7f4d-4138-9260-5ca0118636a9" (UID: "b422ba8f-7f4d-4138-9260-5ca0118636a9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 09:06:23 crc kubenswrapper[4711]: I0123 09:06:23.023915 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9nlqh" event={"ID":"b422ba8f-7f4d-4138-9260-5ca0118636a9","Type":"ContainerDied","Data":"9603c6d306d300a989715101a2e731149510cb344efc19a2fed7da35d4d732d6"} Jan 23 09:06:23 crc kubenswrapper[4711]: I0123 09:06:23.023985 4711 scope.go:117] "RemoveContainer" containerID="e9bdf38481b9dcee03cbdac99f0a13893e559b8463d97cb2611243900f800e6c" Jan 23 09:06:23 crc kubenswrapper[4711]: I0123 09:06:23.024205 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9nlqh" Jan 23 09:06:23 crc kubenswrapper[4711]: I0123 09:06:23.064096 4711 scope.go:117] "RemoveContainer" containerID="21e65d9aab3cfd94671c91a99ffb231f28d57315c9d55c31fc40df442cbd4c85" Jan 23 09:06:23 crc kubenswrapper[4711]: I0123 09:06:23.066879 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9nlqh"] Jan 23 09:06:23 crc kubenswrapper[4711]: I0123 09:06:23.069497 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b422ba8f-7f4d-4138-9260-5ca0118636a9-utilities\") on node \"crc\" DevicePath \"\"" Jan 23 09:06:23 crc kubenswrapper[4711]: I0123 09:06:23.069540 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b422ba8f-7f4d-4138-9260-5ca0118636a9-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 23 09:06:23 crc kubenswrapper[4711]: I0123 09:06:23.069553 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nxwh7\" (UniqueName: \"kubernetes.io/projected/b422ba8f-7f4d-4138-9260-5ca0118636a9-kube-api-access-nxwh7\") on node \"crc\" DevicePath \"\"" Jan 23 09:06:23 crc kubenswrapper[4711]: I0123 09:06:23.074824 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9nlqh"] Jan 23 09:06:23 crc kubenswrapper[4711]: I0123 09:06:23.088466 4711 scope.go:117] "RemoveContainer" containerID="fbb4fc8562260883671be3ed8467de6d0d6193249100efe20057e3f5a905e73d" Jan 23 09:06:23 crc kubenswrapper[4711]: I0123 09:06:23.506828 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b422ba8f-7f4d-4138-9260-5ca0118636a9" path="/var/lib/kubelet/pods/b422ba8f-7f4d-4138-9260-5ca0118636a9/volumes" Jan 23 09:06:41 crc kubenswrapper[4711]: I0123 09:06:41.158112 4711 generic.go:334] "Generic (PLEG): container finished" podID="b5fda0d5-3233-4592-9a13-39e692ec48c1" containerID="1bca349dde14917b742c37500ce8e4b88802348e8149b8669ed7d2e67eb1247e" exitCode=0 Jan 23 09:06:41 crc kubenswrapper[4711]: I0123 09:06:41.158197 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-zltqz/must-gather-x52ds" event={"ID":"b5fda0d5-3233-4592-9a13-39e692ec48c1","Type":"ContainerDied","Data":"1bca349dde14917b742c37500ce8e4b88802348e8149b8669ed7d2e67eb1247e"} Jan 23 09:06:41 crc kubenswrapper[4711]: I0123 09:06:41.159208 4711 scope.go:117] "RemoveContainer" containerID="1bca349dde14917b742c37500ce8e4b88802348e8149b8669ed7d2e67eb1247e" Jan 23 09:06:41 crc kubenswrapper[4711]: I0123 09:06:41.865574 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zltqz_must-gather-x52ds_b5fda0d5-3233-4592-9a13-39e692ec48c1/gather/0.log" Jan 23 09:06:49 crc kubenswrapper[4711]: I0123 09:06:49.735537 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-zltqz/must-gather-x52ds"] Jan 23 09:06:49 crc kubenswrapper[4711]: I0123 09:06:49.736389 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-zltqz/must-gather-x52ds" podUID="b5fda0d5-3233-4592-9a13-39e692ec48c1" containerName="copy" containerID="cri-o://6d7951a0d2da48fa5d38b8a2d17d7ec23240faadc6ecb70e1882c039e0b77fe4" gracePeriod=2 Jan 23 09:06:49 crc kubenswrapper[4711]: I0123 09:06:49.747994 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-zltqz/must-gather-x52ds"] Jan 23 09:06:50 crc 
Jan 23 09:06:50 crc kubenswrapper[4711]: I0123 09:06:50.162873 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zltqz_must-gather-x52ds_b5fda0d5-3233-4592-9a13-39e692ec48c1/copy/0.log"
Jan 23 09:06:50 crc kubenswrapper[4711]: I0123 09:06:50.163518 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zltqz/must-gather-x52ds"
Jan 23 09:06:50 crc kubenswrapper[4711]: I0123 09:06:50.225523 4711 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-zltqz_must-gather-x52ds_b5fda0d5-3233-4592-9a13-39e692ec48c1/copy/0.log"
Jan 23 09:06:50 crc kubenswrapper[4711]: I0123 09:06:50.225953 4711 generic.go:334] "Generic (PLEG): container finished" podID="b5fda0d5-3233-4592-9a13-39e692ec48c1" containerID="6d7951a0d2da48fa5d38b8a2d17d7ec23240faadc6ecb70e1882c039e0b77fe4" exitCode=143
Jan 23 09:06:50 crc kubenswrapper[4711]: I0123 09:06:50.226022 4711 scope.go:117] "RemoveContainer" containerID="6d7951a0d2da48fa5d38b8a2d17d7ec23240faadc6ecb70e1882c039e0b77fe4"
Jan 23 09:06:50 crc kubenswrapper[4711]: I0123 09:06:50.226028 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-zltqz/must-gather-x52ds"
Jan 23 09:06:50 crc kubenswrapper[4711]: I0123 09:06:50.227266 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bz27\" (UniqueName: \"kubernetes.io/projected/b5fda0d5-3233-4592-9a13-39e692ec48c1-kube-api-access-6bz27\") pod \"b5fda0d5-3233-4592-9a13-39e692ec48c1\" (UID: \"b5fda0d5-3233-4592-9a13-39e692ec48c1\") "
Jan 23 09:06:50 crc kubenswrapper[4711]: I0123 09:06:50.227375 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b5fda0d5-3233-4592-9a13-39e692ec48c1-must-gather-output\") pod \"b5fda0d5-3233-4592-9a13-39e692ec48c1\" (UID: \"b5fda0d5-3233-4592-9a13-39e692ec48c1\") "
Jan 23 09:06:50 crc kubenswrapper[4711]: I0123 09:06:50.232753 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5fda0d5-3233-4592-9a13-39e692ec48c1-kube-api-access-6bz27" (OuterVolumeSpecName: "kube-api-access-6bz27") pod "b5fda0d5-3233-4592-9a13-39e692ec48c1" (UID: "b5fda0d5-3233-4592-9a13-39e692ec48c1"). InnerVolumeSpecName "kube-api-access-6bz27". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 23 09:06:50 crc kubenswrapper[4711]: I0123 09:06:50.275322 4711 scope.go:117] "RemoveContainer" containerID="1bca349dde14917b742c37500ce8e4b88802348e8149b8669ed7d2e67eb1247e"
Jan 23 09:06:50 crc kubenswrapper[4711]: I0123 09:06:50.329823 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bz27\" (UniqueName: \"kubernetes.io/projected/b5fda0d5-3233-4592-9a13-39e692ec48c1-kube-api-access-6bz27\") on node \"crc\" DevicePath \"\""
Jan 23 09:06:50 crc kubenswrapper[4711]: I0123 09:06:50.343978 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5fda0d5-3233-4592-9a13-39e692ec48c1-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "b5fda0d5-3233-4592-9a13-39e692ec48c1" (UID: "b5fda0d5-3233-4592-9a13-39e692ec48c1"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
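
[Annotation] The copy container above finishes with exitCode=143 right after "Killing container with a grace period", while the other containers in this log exit 0. By the common shell/runtime convention, an exit code above 128 encodes death by signal as 128 + the signal number, so 143 = 128 + 15 (SIGTERM): the container died on the graceful-stop signal instead of shutting down and exiting cleanly within its gracePeriod. A tiny decoder under that convention (illustrative helper, not a kubelet API):

    package main

    import (
    	"fmt"
    	"syscall"
    )

    // describeExit interprets a container exit code using the usual
    // 128+signal convention (143 = 128 + SIGTERM, 137 = 128 + SIGKILL).
    func describeExit(code int) string {
    	if code > 128 {
    		sig := syscall.Signal(code - 128)
    		return fmt.Sprintf("killed by signal %d (%v)", code-128, sig)
    	}
    	return fmt.Sprintf("exited with status %d", code)
    }

    func main() {
    	fmt.Println(143, "->", describeExit(143)) // graceful-stop signal
    	fmt.Println(0, "->", describeExit(0))     // clean exit
    }

Had the grace period (here 2 seconds) expired first, the runtime would have escalated to SIGKILL and the code would read 137 instead.
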
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 09:06:50 crc kubenswrapper[4711]: I0123 09:06:50.350995 4711 scope.go:117] "RemoveContainer" containerID="6d7951a0d2da48fa5d38b8a2d17d7ec23240faadc6ecb70e1882c039e0b77fe4" Jan 23 09:06:50 crc kubenswrapper[4711]: E0123 09:06:50.352373 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d7951a0d2da48fa5d38b8a2d17d7ec23240faadc6ecb70e1882c039e0b77fe4\": container with ID starting with 6d7951a0d2da48fa5d38b8a2d17d7ec23240faadc6ecb70e1882c039e0b77fe4 not found: ID does not exist" containerID="6d7951a0d2da48fa5d38b8a2d17d7ec23240faadc6ecb70e1882c039e0b77fe4" Jan 23 09:06:50 crc kubenswrapper[4711]: I0123 09:06:50.352413 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d7951a0d2da48fa5d38b8a2d17d7ec23240faadc6ecb70e1882c039e0b77fe4"} err="failed to get container status \"6d7951a0d2da48fa5d38b8a2d17d7ec23240faadc6ecb70e1882c039e0b77fe4\": rpc error: code = NotFound desc = could not find container \"6d7951a0d2da48fa5d38b8a2d17d7ec23240faadc6ecb70e1882c039e0b77fe4\": container with ID starting with 6d7951a0d2da48fa5d38b8a2d17d7ec23240faadc6ecb70e1882c039e0b77fe4 not found: ID does not exist" Jan 23 09:06:50 crc kubenswrapper[4711]: I0123 09:06:50.352436 4711 scope.go:117] "RemoveContainer" containerID="1bca349dde14917b742c37500ce8e4b88802348e8149b8669ed7d2e67eb1247e" Jan 23 09:06:50 crc kubenswrapper[4711]: E0123 09:06:50.353375 4711 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bca349dde14917b742c37500ce8e4b88802348e8149b8669ed7d2e67eb1247e\": container with ID starting with 1bca349dde14917b742c37500ce8e4b88802348e8149b8669ed7d2e67eb1247e not found: ID does not exist" containerID="1bca349dde14917b742c37500ce8e4b88802348e8149b8669ed7d2e67eb1247e" Jan 23 09:06:50 crc kubenswrapper[4711]: I0123 09:06:50.353403 4711 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bca349dde14917b742c37500ce8e4b88802348e8149b8669ed7d2e67eb1247e"} err="failed to get container status \"1bca349dde14917b742c37500ce8e4b88802348e8149b8669ed7d2e67eb1247e\": rpc error: code = NotFound desc = could not find container \"1bca349dde14917b742c37500ce8e4b88802348e8149b8669ed7d2e67eb1247e\": container with ID starting with 1bca349dde14917b742c37500ce8e4b88802348e8149b8669ed7d2e67eb1247e not found: ID does not exist" Jan 23 09:06:50 crc kubenswrapper[4711]: I0123 09:06:50.431609 4711 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b5fda0d5-3233-4592-9a13-39e692ec48c1-must-gather-output\") on node \"crc\" DevicePath \"\"" Jan 23 09:06:51 crc kubenswrapper[4711]: I0123 09:06:51.484262 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5fda0d5-3233-4592-9a13-39e692ec48c1" path="/var/lib/kubelet/pods/b5fda0d5-3233-4592-9a13-39e692ec48c1/volumes" Jan 23 09:07:55 crc kubenswrapper[4711]: I0123 09:07:55.996713 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 09:07:55 crc kubenswrapper[4711]: I0123 09:07:55.997568 4711 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 09:08:25 crc kubenswrapper[4711]: I0123 09:08:25.994175 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 09:08:25 crc kubenswrapper[4711]: I0123 09:08:25.994795 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 09:08:55 crc kubenswrapper[4711]: I0123 09:08:55.993450 4711 patch_prober.go:28] interesting pod/machine-config-daemon-2t9r8 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 23 09:08:55 crc kubenswrapper[4711]: I0123 09:08:55.994009 4711 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 23 09:08:55 crc kubenswrapper[4711]: I0123 09:08:55.994053 4711 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" Jan 23 09:08:55 crc kubenswrapper[4711]: I0123 09:08:55.994585 4711 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9bb5bbd648af9315235be45b215ca2d23345eb516119337679151e09a532a4b5"} pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 23 09:08:55 crc kubenswrapper[4711]: I0123 09:08:55.994636 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" podUID="3846d4e0-cfda-4e0b-8747-85267de12736" containerName="machine-config-daemon" containerID="cri-o://9bb5bbd648af9315235be45b215ca2d23345eb516119337679151e09a532a4b5" gracePeriod=600 Jan 23 09:08:56 crc kubenswrapper[4711]: I0123 09:08:56.310726 4711 generic.go:334] "Generic (PLEG): container finished" podID="3846d4e0-cfda-4e0b-8747-85267de12736" containerID="9bb5bbd648af9315235be45b215ca2d23345eb516119337679151e09a532a4b5" exitCode=0 Jan 23 09:08:56 crc kubenswrapper[4711]: I0123 09:08:56.311188 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerDied","Data":"9bb5bbd648af9315235be45b215ca2d23345eb516119337679151e09a532a4b5"} Jan 23 09:08:56 crc kubenswrapper[4711]: I0123 09:08:56.311299 4711 scope.go:117] "RemoveContainer" 
containerID="19b4a3785d9cb2b468d499af6f2cb9b76a5210dad112a86453ed91200931bae7" Jan 23 09:08:58 crc kubenswrapper[4711]: I0123 09:08:58.374047 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2t9r8" event={"ID":"3846d4e0-cfda-4e0b-8747-85267de12736","Type":"ContainerStarted","Data":"e16a5050665543a7647eab3d72bd461276515155dea2c17699514f284893c4bf"} Jan 23 09:09:08 crc kubenswrapper[4711]: I0123 09:09:08.977685 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-258h5"] Jan 23 09:09:08 crc kubenswrapper[4711]: E0123 09:09:08.982165 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5fda0d5-3233-4592-9a13-39e692ec48c1" containerName="copy" Jan 23 09:09:08 crc kubenswrapper[4711]: I0123 09:09:08.982389 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5fda0d5-3233-4592-9a13-39e692ec48c1" containerName="copy" Jan 23 09:09:08 crc kubenswrapper[4711]: E0123 09:09:08.982537 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b422ba8f-7f4d-4138-9260-5ca0118636a9" containerName="extract-utilities" Jan 23 09:09:08 crc kubenswrapper[4711]: I0123 09:09:08.982659 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b422ba8f-7f4d-4138-9260-5ca0118636a9" containerName="extract-utilities" Jan 23 09:09:08 crc kubenswrapper[4711]: E0123 09:09:08.982792 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b422ba8f-7f4d-4138-9260-5ca0118636a9" containerName="registry-server" Jan 23 09:09:08 crc kubenswrapper[4711]: I0123 09:09:08.982899 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b422ba8f-7f4d-4138-9260-5ca0118636a9" containerName="registry-server" Jan 23 09:09:08 crc kubenswrapper[4711]: E0123 09:09:08.983013 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5fda0d5-3233-4592-9a13-39e692ec48c1" containerName="gather" Jan 23 09:09:08 crc kubenswrapper[4711]: I0123 09:09:08.983120 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5fda0d5-3233-4592-9a13-39e692ec48c1" containerName="gather" Jan 23 09:09:08 crc kubenswrapper[4711]: E0123 09:09:08.983230 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b422ba8f-7f4d-4138-9260-5ca0118636a9" containerName="extract-content" Jan 23 09:09:08 crc kubenswrapper[4711]: I0123 09:09:08.983329 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="b422ba8f-7f4d-4138-9260-5ca0118636a9" containerName="extract-content" Jan 23 09:09:08 crc kubenswrapper[4711]: I0123 09:09:08.983829 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5fda0d5-3233-4592-9a13-39e692ec48c1" containerName="gather" Jan 23 09:09:08 crc kubenswrapper[4711]: I0123 09:09:08.983976 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5fda0d5-3233-4592-9a13-39e692ec48c1" containerName="copy" Jan 23 09:09:08 crc kubenswrapper[4711]: I0123 09:09:08.984097 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="b422ba8f-7f4d-4138-9260-5ca0118636a9" containerName="registry-server" Jan 23 09:09:08 crc kubenswrapper[4711]: I0123 09:09:08.985940 4711 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-258h5" Jan 23 09:09:09 crc kubenswrapper[4711]: I0123 09:09:09.002052 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-258h5"] Jan 23 09:09:09 crc kubenswrapper[4711]: I0123 09:09:09.175467 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a18becae-f8f4-4caa-bb50-eb3771125005-utilities\") pod \"community-operators-258h5\" (UID: \"a18becae-f8f4-4caa-bb50-eb3771125005\") " pod="openshift-marketplace/community-operators-258h5" Jan 23 09:09:09 crc kubenswrapper[4711]: I0123 09:09:09.175638 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdxmx\" (UniqueName: \"kubernetes.io/projected/a18becae-f8f4-4caa-bb50-eb3771125005-kube-api-access-mdxmx\") pod \"community-operators-258h5\" (UID: \"a18becae-f8f4-4caa-bb50-eb3771125005\") " pod="openshift-marketplace/community-operators-258h5" Jan 23 09:09:09 crc kubenswrapper[4711]: I0123 09:09:09.175714 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a18becae-f8f4-4caa-bb50-eb3771125005-catalog-content\") pod \"community-operators-258h5\" (UID: \"a18becae-f8f4-4caa-bb50-eb3771125005\") " pod="openshift-marketplace/community-operators-258h5" Jan 23 09:09:09 crc kubenswrapper[4711]: I0123 09:09:09.276893 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a18becae-f8f4-4caa-bb50-eb3771125005-utilities\") pod \"community-operators-258h5\" (UID: \"a18becae-f8f4-4caa-bb50-eb3771125005\") " pod="openshift-marketplace/community-operators-258h5" Jan 23 09:09:09 crc kubenswrapper[4711]: I0123 09:09:09.276970 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdxmx\" (UniqueName: \"kubernetes.io/projected/a18becae-f8f4-4caa-bb50-eb3771125005-kube-api-access-mdxmx\") pod \"community-operators-258h5\" (UID: \"a18becae-f8f4-4caa-bb50-eb3771125005\") " pod="openshift-marketplace/community-operators-258h5" Jan 23 09:09:09 crc kubenswrapper[4711]: I0123 09:09:09.277017 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a18becae-f8f4-4caa-bb50-eb3771125005-catalog-content\") pod \"community-operators-258h5\" (UID: \"a18becae-f8f4-4caa-bb50-eb3771125005\") " pod="openshift-marketplace/community-operators-258h5" Jan 23 09:09:09 crc kubenswrapper[4711]: I0123 09:09:09.277342 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a18becae-f8f4-4caa-bb50-eb3771125005-utilities\") pod \"community-operators-258h5\" (UID: \"a18becae-f8f4-4caa-bb50-eb3771125005\") " pod="openshift-marketplace/community-operators-258h5" Jan 23 09:09:09 crc kubenswrapper[4711]: I0123 09:09:09.277380 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a18becae-f8f4-4caa-bb50-eb3771125005-catalog-content\") pod \"community-operators-258h5\" (UID: \"a18becae-f8f4-4caa-bb50-eb3771125005\") " pod="openshift-marketplace/community-operators-258h5" Jan 23 09:09:09 crc kubenswrapper[4711]: I0123 09:09:09.300707 4711 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mdxmx\" (UniqueName: \"kubernetes.io/projected/a18becae-f8f4-4caa-bb50-eb3771125005-kube-api-access-mdxmx\") pod \"community-operators-258h5\" (UID: \"a18becae-f8f4-4caa-bb50-eb3771125005\") " pod="openshift-marketplace/community-operators-258h5" Jan 23 09:09:09 crc kubenswrapper[4711]: I0123 09:09:09.326830 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-258h5" Jan 23 09:09:09 crc kubenswrapper[4711]: I0123 09:09:09.840564 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-258h5"] Jan 23 09:09:10 crc kubenswrapper[4711]: I0123 09:09:10.569077 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-258h5" event={"ID":"a18becae-f8f4-4caa-bb50-eb3771125005","Type":"ContainerStarted","Data":"7f36392b397a116c703102dbabe9d5e400670b198e44230be40a2510e92eaf07"} Jan 23 09:09:10 crc kubenswrapper[4711]: I0123 09:09:10.569360 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-258h5" event={"ID":"a18becae-f8f4-4caa-bb50-eb3771125005","Type":"ContainerStarted","Data":"71bd6652ea9af70e87a8efd2e2e6c038d4161424e2d99ae581611d4c5e628e37"} Jan 23 09:09:11 crc kubenswrapper[4711]: I0123 09:09:11.580490 4711 generic.go:334] "Generic (PLEG): container finished" podID="a18becae-f8f4-4caa-bb50-eb3771125005" containerID="7f36392b397a116c703102dbabe9d5e400670b198e44230be40a2510e92eaf07" exitCode=0 Jan 23 09:09:11 crc kubenswrapper[4711]: I0123 09:09:11.580654 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-258h5" event={"ID":"a18becae-f8f4-4caa-bb50-eb3771125005","Type":"ContainerDied","Data":"7f36392b397a116c703102dbabe9d5e400670b198e44230be40a2510e92eaf07"} Jan 23 09:09:11 crc kubenswrapper[4711]: I0123 09:09:11.583851 4711 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 23 09:09:17 crc kubenswrapper[4711]: I0123 09:09:17.623931 4711 generic.go:334] "Generic (PLEG): container finished" podID="a18becae-f8f4-4caa-bb50-eb3771125005" containerID="7d6ba77546df78b9fa816c513243277ebba4279f2fb25e1e48d86b624b2dede6" exitCode=0 Jan 23 09:09:17 crc kubenswrapper[4711]: I0123 09:09:17.624010 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-258h5" event={"ID":"a18becae-f8f4-4caa-bb50-eb3771125005","Type":"ContainerDied","Data":"7d6ba77546df78b9fa816c513243277ebba4279f2fb25e1e48d86b624b2dede6"} Jan 23 09:09:19 crc kubenswrapper[4711]: I0123 09:09:19.642698 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-258h5" event={"ID":"a18becae-f8f4-4caa-bb50-eb3771125005","Type":"ContainerStarted","Data":"062e5c4ccc27cd45296606c0459591e677b89e153fe04431ea54cd2e2dbf6288"} Jan 23 09:09:19 crc kubenswrapper[4711]: I0123 09:09:19.668745 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-258h5" podStartSLOduration=4.6071045779999995 podStartE2EDuration="11.668726586s" podCreationTimestamp="2026-01-23 09:09:08 +0000 UTC" firstStartedPulling="2026-01-23 09:09:11.583483518 +0000 UTC m=+2937.156439896" lastFinishedPulling="2026-01-23 09:09:18.645105536 +0000 UTC m=+2944.218061904" observedRunningTime="2026-01-23 09:09:19.661358246 +0000 UTC m=+2945.234314624" 
watchObservedRunningTime="2026-01-23 09:09:19.668726586 +0000 UTC m=+2945.241682954" Jan 23 09:09:29 crc kubenswrapper[4711]: I0123 09:09:29.328177 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-258h5" Jan 23 09:09:29 crc kubenswrapper[4711]: I0123 09:09:29.329000 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-258h5" Jan 23 09:09:29 crc kubenswrapper[4711]: I0123 09:09:29.377849 4711 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-258h5" Jan 23 09:09:29 crc kubenswrapper[4711]: I0123 09:09:29.822924 4711 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-258h5" Jan 23 09:09:29 crc kubenswrapper[4711]: I0123 09:09:29.889715 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-258h5"] Jan 23 09:09:31 crc kubenswrapper[4711]: I0123 09:09:31.738912 4711 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-258h5" podUID="a18becae-f8f4-4caa-bb50-eb3771125005" containerName="registry-server" containerID="cri-o://062e5c4ccc27cd45296606c0459591e677b89e153fe04431ea54cd2e2dbf6288" gracePeriod=2 Jan 23 09:09:34 crc kubenswrapper[4711]: I0123 09:09:34.760842 4711 generic.go:334] "Generic (PLEG): container finished" podID="a18becae-f8f4-4caa-bb50-eb3771125005" containerID="062e5c4ccc27cd45296606c0459591e677b89e153fe04431ea54cd2e2dbf6288" exitCode=0 Jan 23 09:09:34 crc kubenswrapper[4711]: I0123 09:09:34.760907 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-258h5" event={"ID":"a18becae-f8f4-4caa-bb50-eb3771125005","Type":"ContainerDied","Data":"062e5c4ccc27cd45296606c0459591e677b89e153fe04431ea54cd2e2dbf6288"} Jan 23 09:09:36 crc kubenswrapper[4711]: I0123 09:09:36.461109 4711 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-258h5" Jan 23 09:09:36 crc kubenswrapper[4711]: I0123 09:09:36.620725 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a18becae-f8f4-4caa-bb50-eb3771125005-catalog-content\") pod \"a18becae-f8f4-4caa-bb50-eb3771125005\" (UID: \"a18becae-f8f4-4caa-bb50-eb3771125005\") " Jan 23 09:09:36 crc kubenswrapper[4711]: I0123 09:09:36.620878 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdxmx\" (UniqueName: \"kubernetes.io/projected/a18becae-f8f4-4caa-bb50-eb3771125005-kube-api-access-mdxmx\") pod \"a18becae-f8f4-4caa-bb50-eb3771125005\" (UID: \"a18becae-f8f4-4caa-bb50-eb3771125005\") " Jan 23 09:09:36 crc kubenswrapper[4711]: I0123 09:09:36.620974 4711 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a18becae-f8f4-4caa-bb50-eb3771125005-utilities\") pod \"a18becae-f8f4-4caa-bb50-eb3771125005\" (UID: \"a18becae-f8f4-4caa-bb50-eb3771125005\") " Jan 23 09:09:36 crc kubenswrapper[4711]: I0123 09:09:36.622224 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a18becae-f8f4-4caa-bb50-eb3771125005-utilities" (OuterVolumeSpecName: "utilities") pod "a18becae-f8f4-4caa-bb50-eb3771125005" (UID: "a18becae-f8f4-4caa-bb50-eb3771125005"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 09:09:36 crc kubenswrapper[4711]: I0123 09:09:36.626448 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a18becae-f8f4-4caa-bb50-eb3771125005-kube-api-access-mdxmx" (OuterVolumeSpecName: "kube-api-access-mdxmx") pod "a18becae-f8f4-4caa-bb50-eb3771125005" (UID: "a18becae-f8f4-4caa-bb50-eb3771125005"). InnerVolumeSpecName "kube-api-access-mdxmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 23 09:09:36 crc kubenswrapper[4711]: I0123 09:09:36.672337 4711 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a18becae-f8f4-4caa-bb50-eb3771125005-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a18becae-f8f4-4caa-bb50-eb3771125005" (UID: "a18becae-f8f4-4caa-bb50-eb3771125005"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 23 09:09:36 crc kubenswrapper[4711]: I0123 09:09:36.722659 4711 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a18becae-f8f4-4caa-bb50-eb3771125005-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 23 09:09:36 crc kubenswrapper[4711]: I0123 09:09:36.722708 4711 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdxmx\" (UniqueName: \"kubernetes.io/projected/a18becae-f8f4-4caa-bb50-eb3771125005-kube-api-access-mdxmx\") on node \"crc\" DevicePath \"\"" Jan 23 09:09:36 crc kubenswrapper[4711]: I0123 09:09:36.722726 4711 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a18becae-f8f4-4caa-bb50-eb3771125005-utilities\") on node \"crc\" DevicePath \"\"" Jan 23 09:09:36 crc kubenswrapper[4711]: I0123 09:09:36.781314 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-258h5" event={"ID":"a18becae-f8f4-4caa-bb50-eb3771125005","Type":"ContainerDied","Data":"71bd6652ea9af70e87a8efd2e2e6c038d4161424e2d99ae581611d4c5e628e37"} Jan 23 09:09:36 crc kubenswrapper[4711]: I0123 09:09:36.781355 4711 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-258h5" Jan 23 09:09:36 crc kubenswrapper[4711]: I0123 09:09:36.781367 4711 scope.go:117] "RemoveContainer" containerID="062e5c4ccc27cd45296606c0459591e677b89e153fe04431ea54cd2e2dbf6288" Jan 23 09:09:36 crc kubenswrapper[4711]: I0123 09:09:36.808410 4711 scope.go:117] "RemoveContainer" containerID="7d6ba77546df78b9fa816c513243277ebba4279f2fb25e1e48d86b624b2dede6" Jan 23 09:09:36 crc kubenswrapper[4711]: I0123 09:09:36.839053 4711 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-258h5"] Jan 23 09:09:36 crc kubenswrapper[4711]: I0123 09:09:36.849289 4711 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-258h5"] Jan 23 09:09:36 crc kubenswrapper[4711]: I0123 09:09:36.855975 4711 scope.go:117] "RemoveContainer" containerID="7f36392b397a116c703102dbabe9d5e400670b198e44230be40a2510e92eaf07" Jan 23 09:09:37 crc kubenswrapper[4711]: I0123 09:09:37.489203 4711 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a18becae-f8f4-4caa-bb50-eb3771125005" path="/var/lib/kubelet/pods/a18becae-f8f4-4caa-bb50-eb3771125005/volumes" Jan 23 09:10:42 crc kubenswrapper[4711]: I0123 09:10:42.182377 4711 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-delete-xq875"] Jan 23 09:10:42 crc kubenswrapper[4711]: E0123 09:10:42.183131 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a18becae-f8f4-4caa-bb50-eb3771125005" containerName="registry-server" Jan 23 09:10:42 crc kubenswrapper[4711]: I0123 09:10:42.183143 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a18becae-f8f4-4caa-bb50-eb3771125005" containerName="registry-server" Jan 23 09:10:42 crc kubenswrapper[4711]: E0123 09:10:42.183172 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a18becae-f8f4-4caa-bb50-eb3771125005" containerName="extract-utilities" Jan 23 09:10:42 crc kubenswrapper[4711]: I0123 09:10:42.183178 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a18becae-f8f4-4caa-bb50-eb3771125005" containerName="extract-utilities" Jan 23 09:10:42 crc kubenswrapper[4711]: E0123 
Jan 23 09:10:42 crc kubenswrapper[4711]: E0123 09:10:42.183188 4711 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a18becae-f8f4-4caa-bb50-eb3771125005" containerName="extract-content"
Jan 23 09:10:42 crc kubenswrapper[4711]: I0123 09:10:42.183195 4711 state_mem.go:107] "Deleted CPUSet assignment" podUID="a18becae-f8f4-4caa-bb50-eb3771125005" containerName="extract-content"
Jan 23 09:10:42 crc kubenswrapper[4711]: I0123 09:10:42.183350 4711 memory_manager.go:354] "RemoveStaleState removing state" podUID="a18becae-f8f4-4caa-bb50-eb3771125005" containerName="registry-server"
Jan 23 09:10:42 crc kubenswrapper[4711]: I0123 09:10:42.183911 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-xq875"
Jan 23 09:10:42 crc kubenswrapper[4711]: I0123 09:10:42.186167 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-manage-config-data"
Jan 23 09:10:42 crc kubenswrapper[4711]: I0123 09:10:42.187196 4711 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"nova-kuttl-cell0-manage-scripts"
Jan 23 09:10:42 crc kubenswrapper[4711]: I0123 09:10:42.189473 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-delete-xq875"]
Jan 23 09:10:42 crc kubenswrapper[4711]: I0123 09:10:42.346045 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91578b68-769f-422c-95d4-2918ce49cf9f-config-data\") pod \"nova-kuttl-cell1-cell-delete-xq875\" (UID: \"91578b68-769f-422c-95d4-2918ce49cf9f\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-xq875"
Jan 23 09:10:42 crc kubenswrapper[4711]: I0123 09:10:42.346145 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91578b68-769f-422c-95d4-2918ce49cf9f-scripts\") pod \"nova-kuttl-cell1-cell-delete-xq875\" (UID: \"91578b68-769f-422c-95d4-2918ce49cf9f\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-xq875"
Jan 23 09:10:42 crc kubenswrapper[4711]: I0123 09:10:42.346454 4711 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lm8vr\" (UniqueName: \"kubernetes.io/projected/91578b68-769f-422c-95d4-2918ce49cf9f-kube-api-access-lm8vr\") pod \"nova-kuttl-cell1-cell-delete-xq875\" (UID: \"91578b68-769f-422c-95d4-2918ce49cf9f\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-xq875"
Jan 23 09:10:42 crc kubenswrapper[4711]: I0123 09:10:42.448592 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lm8vr\" (UniqueName: \"kubernetes.io/projected/91578b68-769f-422c-95d4-2918ce49cf9f-kube-api-access-lm8vr\") pod \"nova-kuttl-cell1-cell-delete-xq875\" (UID: \"91578b68-769f-422c-95d4-2918ce49cf9f\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-xq875"
Jan 23 09:10:42 crc kubenswrapper[4711]: I0123 09:10:42.448693 4711 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91578b68-769f-422c-95d4-2918ce49cf9f-config-data\") pod \"nova-kuttl-cell1-cell-delete-xq875\" (UID: \"91578b68-769f-422c-95d4-2918ce49cf9f\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-xq875"
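[annotation] Volume setup for the nova-kuttl job pod repeats the same VerifyControllerAttachedVolume then MountVolume pattern seen for the catalog pod, this time with secret-backed volumes (config-data, scripts) plus the projected service-account token. Since both pods' lifecycles interleave in this capture, a sketch (not log content) for isolating one pod's timeline via the pod="..." attribute:

```python
# Sketch: reconstruct a single pod's timeline (ADD -> volume setup ->
# PLEG events -> probes -> DELETE) by filtering on the pod="..." attribute.
import re

POD_RE = re.compile(r'pod="([^"]+)"')

def pod_timeline(records, pod_name):
    for r in records:
        m = POD_RE.search(r["msg"])
        if m and m.group(1) == pod_name:
            yield r["klog_time"], r["src"], r["msg"]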
\"scripts\" (UniqueName: \"kubernetes.io/secret/91578b68-769f-422c-95d4-2918ce49cf9f-scripts\") pod \"nova-kuttl-cell1-cell-delete-xq875\" (UID: \"91578b68-769f-422c-95d4-2918ce49cf9f\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-xq875" Jan 23 09:10:42 crc kubenswrapper[4711]: I0123 09:10:42.458196 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91578b68-769f-422c-95d4-2918ce49cf9f-scripts\") pod \"nova-kuttl-cell1-cell-delete-xq875\" (UID: \"91578b68-769f-422c-95d4-2918ce49cf9f\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-xq875" Jan 23 09:10:42 crc kubenswrapper[4711]: I0123 09:10:42.458382 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91578b68-769f-422c-95d4-2918ce49cf9f-config-data\") pod \"nova-kuttl-cell1-cell-delete-xq875\" (UID: \"91578b68-769f-422c-95d4-2918ce49cf9f\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-xq875" Jan 23 09:10:42 crc kubenswrapper[4711]: I0123 09:10:42.466359 4711 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lm8vr\" (UniqueName: \"kubernetes.io/projected/91578b68-769f-422c-95d4-2918ce49cf9f-kube-api-access-lm8vr\") pod \"nova-kuttl-cell1-cell-delete-xq875\" (UID: \"91578b68-769f-422c-95d4-2918ce49cf9f\") " pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-xq875" Jan 23 09:10:42 crc kubenswrapper[4711]: I0123 09:10:42.505136 4711 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-xq875" Jan 23 09:10:42 crc kubenswrapper[4711]: I0123 09:10:42.963338 4711 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/nova-kuttl-cell1-cell-delete-xq875"] Jan 23 09:10:43 crc kubenswrapper[4711]: I0123 09:10:43.345126 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-xq875" event={"ID":"91578b68-769f-422c-95d4-2918ce49cf9f","Type":"ContainerStarted","Data":"710fbaaa7cc40417c7011834616876ad404452ea9f8987eaf3c0b8c261bca18a"} Jan 23 09:10:44 crc kubenswrapper[4711]: I0123 09:10:44.358585 4711 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-xq875" event={"ID":"91578b68-769f-422c-95d4-2918ce49cf9f","Type":"ContainerStarted","Data":"0b6d957c6c6d0cd5867b4dc16d763ebc296df14f6e4192f9b54f77a2036fd451"} Jan 23 09:10:44 crc kubenswrapper[4711]: I0123 09:10:44.378708 4711 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/nova-kuttl-cell1-cell-delete-xq875" podStartSLOduration=2.378660756 podStartE2EDuration="2.378660756s" podCreationTimestamp="2026-01-23 09:10:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-23 09:10:44.37556692 +0000 UTC m=+3029.948523308" watchObservedRunningTime="2026-01-23 09:10:44.378660756 +0000 UTC m=+3029.951617124" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515134635443024455 0ustar coreroot  Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015134635443017372 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015134627227016516 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015134627230015460 5ustar corecore